initial commit (commit 5ca1185c90)

@ -0,0 +1,21 @@
# This config is equivalent to both the '.circleci/extended/orb-free.yml' and the base '.circleci/config.yml'
version: 2.1

# Orbs are reusable packages of CircleCI configuration that you may share across projects, enabling you to create encapsulated, parameterized commands, jobs, and executors that can be used across multiple projects.
# See: https://circleci.com/docs/2.0/orb-intro/
orbs:
  node: circleci/node@5.1.0

# Invoke jobs via workflows
# See: https://circleci.com/docs/2.0/configuration-reference/#workflows
workflows:
  sample: # This is the name of the workflow, feel free to change it to better match your workflow.
    # Inside the workflow, you define the jobs you want to run.
    jobs:
      - node/test:
          # This is the node version to use for the `cimg/node` tag
          # Relevant tags can be found on the CircleCI Developer Hub
          # https://circleci.com/developer/images/image/cimg/node
          version: '18.14.2'
          # If you are using yarn, change the line below from "npm" to "yarn"
          pkg-manager: npm

@ -0,0 +1,7 @@
node_modules
npm-debug.log
Dockerfile
.git
.gitignore
build
README.md

@ -0,0 +1,8 @@
root = true

[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
insert_final_newline = true

@ -0,0 +1,67 @@
HOST=0.0.0.0
PORT=3000
SERVICE_NAME='your_service_name'

#JWT CONFIGURATION
JWT_KEY='your_secret'
SECRET='my_super_secret'
HASH=10
#JWT_PRIVATE_SECRET='jwt-private-secret'
#JWT_PUBLIC_SECRET='jwt-public-secret'

#GOOGLE CLOUD CONFIGURATION
#Go to GCP, create a service account, and replace all the fields in the json file with yours
GOOGLE_APPLICATION_CREDENTIALS='./src/config/gcloud/google-application-credentials.json'
GOOGLE_PROJECT_ID='your_google_project_id'
GOOGLE_STORAGE_BUCKET_NAME='your_google_storage_bucket_name'
GOOGLE_CLIENT_ID='your_google_client_id'
GOOGLE_CLIENT_SECRET='your_google_client_secret'
GOOGLE_MAPS_API_KEY='your_google_maps_api_key'

#CLIENT CONFIGURATION
CLIENT_URL='your_client_url_to_authorize'

#MONGO DB CONFIGURATION
MONGO_URI='your_mongo_db_connection'
MONGO_URI_TEST='your_mongo_db_connection_test'
MONGO_USER='your_mongo_user'
MONGO_PASS='your_mongo_password'

#MYSQL CONFIGURATION
MYSQL_HOST_STAGE='your_mysql_host_stage'
MYSQL_USER_STAGE='your_mysql_user'
MYSQL_PASSWORD_STAGE='your_mysql_pass'
MYSQL_DB_STAGE='your_mysql_db_name'
MYSQL_SOCKET_STAGE='/your/socket-cloud-sql'

MYSQL_HOST_PROD='your_mysql_host_prod'
MYSQL_USER_PROD='your_mysql_user'
MYSQL_PASSWORD_PROD='your_mysql_pass'
MYSQL_DB_PROD='your_mysql_db_name'
MYSQL_SOCKET_PROD='/your/socket-cloud-sql'


#SPARKPOST CONFIGURATION
SPARKPOST_API_KEY='your_sparkpost_test_api_key'
#SPARKPOST_API_KEY='your_sparkpost_live_api_key'
SPARKPOST_SENDER_DOMAIN='your_sparkpost_sender_domain'

# MESSAGEBIRD CONFIGURATION
MESSAGEBIRD_ACCESS_KEY='your_messagebird_access_key' #test key
#MESSAGEBIRD_ACCESS_KEY='your_messagebird_access_key' #live key
MESSAGEBIRD_WHATSAPP_CHANNEL_ID='your_messagebird_whatsapp_channel_id'
MESSAGEBIRD_TEMPLATE_NAMESPACE_ID='your_messagebird_template_namespace_id'

#SENDGRID CONFIGURATION
SENDGRID_API_KEY='your_sendgrid_api_key'
SENDGRID_SENDER_EMAIL='your_sendgrid_email_sender'

#TWILIO CONFIGURATION
TWILIO_ACCOUNT_SID='your_twilio_account_sid'
TWILIO_AUTH_TOKEN='your_twilio_account_token'
TWILIO_PHONE_NUMBER='+your_phone_number'


#PUB/SUB TOPICS
TOPIC_NAME='your_pubsub_topic_name'
SUBSCRIPTION_NAME='your_pubsub_subscription_name'

@ -0,0 +1,3 @@
build/
node_modules/
docs/

@ -0,0 +1,6 @@
{
  "extends": "./node_modules/gts/",
  "rules": {
    "no-process-exit": "off"
  }
}

@ -0,0 +1,5 @@
.gcloudignore
.git
.gitignore
node_modules/
#!include:.gitignore

@ -0,0 +1,38 @@
|
|||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**To Reproduce**
|
||||
Steps to reproduce the behavior:
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Screenshots**
|
||||
If applicable, add screenshots to help explain your problem.
|
||||
|
||||
**Desktop (please complete the following information):**
|
||||
- OS: [e.g. iOS]
|
||||
- Browser [e.g. chrome, safari]
|
||||
- Version [e.g. 22]
|
||||
|
||||
**Smartphone (please complete the following information):**
|
||||
- Device: [e.g. iPhone6]
|
||||
- OS: [e.g. iOS8.1]
|
||||
- Browser [e.g. stock browser, safari]
|
||||
- Version [e.g. 22]
|
||||
|
||||
**Additional context**
|
||||
Add any other context about the problem here.
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Is your feature request related to a problem? Please describe.**
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
|
||||
**Describe the solution you'd like**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Describe alternatives you've considered**
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
||||
|
|
@ -0,0 +1,140 @@
|
|||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
.pnpm-debug.log*
|
||||
|
||||
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# Runtime data
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# nyc test coverage
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||
.grunt
|
||||
|
||||
# Bower dependency directory (https://bower.io/)
|
||||
bower_components
|
||||
|
||||
# node-waf configuration
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# Snowpack dependency directory (https://snowpack.dev/)
|
||||
web_modules/
|
||||
|
||||
# TypeScript cache
|
||||
*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
.eslintcache
|
||||
|
||||
# Optional stylelint cache
|
||||
.stylelintcache
|
||||
|
||||
# Microbundle cache
|
||||
.rpt2_cache/
|
||||
.rts2_cache_cjs/
|
||||
.rts2_cache_es/
|
||||
.rts2_cache_umd/
|
||||
|
||||
# Optional REPL history
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variable files
|
||||
.env
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.local
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
.cache
|
||||
.parcel-cache
|
||||
|
||||
# Next.js build output
|
||||
.next
|
||||
out
|
||||
|
||||
# Nuxt.js build / generate output
|
||||
.nuxt
|
||||
dist
|
||||
|
||||
# Gatsby files
|
||||
.cache/
|
||||
# Comment in the public line in if your project uses Gatsby and not Next.js
|
||||
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||
# public
|
||||
|
||||
# vuepress build output
|
||||
.vuepress/dist
|
||||
|
||||
# vuepress v2.x temp and cache directory
|
||||
.temp
|
||||
.cache
|
||||
|
||||
# Docusaurus cache and generated files
|
||||
.docusaurus
|
||||
|
||||
# Serverless directories
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
.dynamodb/
|
||||
|
||||
# TernJS port file
|
||||
.tern-port
|
||||
|
||||
# Stores VSCode versions used for testing VSCode extensions
|
||||
.vscode-test
|
||||
|
||||
# yarn v2
|
||||
.yarn/cache
|
||||
.yarn/unplugged
|
||||
.yarn/build-state.yml
|
||||
.yarn/install-state.gz
|
||||
.pnp.*
|
||||
|
||||
|
||||
build
|
||||
docs
|
||||
docs/
|
||||
|
||||
|
||||
src/config/gcloud/google-web-client-secret.json
|
||||
src/config/gcloud/google-application-credentials.json
|
||||
target/
|
||||
|
|
@ -0,0 +1,3 @@
node_modules
build
dist

@ -0,0 +1,3 @@
module.exports = {
  ...require('gts/.prettierrc.json')
}

@ -0,0 +1,76 @@
|
|||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
In the interest of fostering an open and welcoming environment, we as
|
||||
contributors and maintainers pledge to make participation in our project and
|
||||
our community a harassment-free experience for everyone, regardless of age, body
|
||||
size, disability, ethnicity, sex characteristics, gender identity and expression,
|
||||
level of experience, education, socio-economic status, nationality, personal
|
||||
appearance, race, religion, or sexual identity and orientation.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment
|
||||
include:
|
||||
|
||||
- Using welcoming and inclusive language
|
||||
- Being respectful of differing viewpoints and experiences
|
||||
- Gracefully accepting constructive criticism
|
||||
- Focusing on what is best for the community
|
||||
- Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
|
||||
- The use of sexualized language or imagery and unwelcome sexual attention or
|
||||
advances
|
||||
- Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
- Public or private harassment
|
||||
- Publishing others' private information, such as a physical or electronic
|
||||
address, without explicit permission
|
||||
- Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Our Responsibilities
|
||||
|
||||
Project maintainers are responsible for clarifying the standards of acceptable
|
||||
behavior and are expected to take appropriate and fair corrective action in
|
||||
response to any instances of unacceptable behavior.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or
|
||||
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||
permanently any contributor for other behaviors that they deem inappropriate,
|
||||
threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies within all project spaces, and it also applies when
|
||||
an individual is representing the project or its community in public spaces.
|
||||
Examples of representing a project or community include using an official
|
||||
project e-mail address, posting via an official social media account, or acting
|
||||
as an appointed representative at an online or offline event. Representation of
|
||||
a project may be further defined and clarified by project maintainers.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported by contacting the project team at [INSERT EMAIL ADDRESS]. All
|
||||
complaints will be reviewed and investigated and will result in a response that
|
||||
is deemed necessary and appropriate to the circumstances. The project team is
|
||||
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||
Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
Project maintainers who do not follow or enforce the Code of Conduct in good
|
||||
faith may face temporary or permanent repercussions as determined by other
|
||||
members of the project's leadership.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
||||
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
|
||||
For answers to common questions about this code of conduct, see
|
||||
https://www.contributor-covenant.org/faq
|
||||
|
|
@ -0,0 +1,9 @@
# Contributing to Project

All contributions are welcome!

To contribute to this project, please:

* fork the repository to your own account
* clone the repository
* make your changes
* submit a pull request against the `development` branch

@ -0,0 +1,20 @@
FROM node:18-alpine as base

WORKDIR /usr/src/app
EXPOSE 3000

FROM base as builder
COPY ["package.json", "package-lock.json*", "./"]
COPY ./tsconfig.json ./tsconfig.json
COPY ./src ./src
# Install all dependencies: dev dependencies are needed to compile TypeScript
RUN npm ci
RUN npm run compile
# Drop dev dependencies once the build is done
RUN npm prune --production

FROM base as release
ENV NODE_ENV=production
USER node
COPY --chown=node:node --from=builder /usr/src/app/node_modules ./node_modules
COPY --chown=node:node --from=builder /usr/src/app/build ./build
COPY --chown=node:node . /usr/src/app
CMD ["node", "./build/src/bin/server"]

@ -0,0 +1,21 @@
MIT License

Copyright (c) 2023 Giuseppe Albrizio

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,189 @@
[](https://dl.circleci.com/status-badge/redirect/gh/giuseppealbrizio/typescript-rest-api-backend/tree/main)

[](https://github.com/google/gts)
[](https://opensource.org/licenses/MIT)

[](https://github.com/Envoy-VC/awesome-badges)
[](https://github.com/Envoy-VC/awesome-badges)

# TypeScript REST API Backend Template

## Feel free to support this project

If you found this project helpful, please consider supporting me by buying me a coffee! Your support will help me keep creating useful content and improving this project.

[](https://www.buymeacoffee.com/galbrizio)

---

#### TypeScript REST microservice boilerplate using Node.js, Express, and some other useful tooling

This template is intended to be used as a single service in a REST multi-service application that uses Cloud Pub/Sub as its message broker.

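As a rough illustration of that message-broker role, here is a minimal sketch of publishing an event to a Cloud Pub/Sub topic with `@google-cloud/pubsub`. The topic name and payload below are placeholders; this is not the template's actual pub/sub service (that lives under `./src/services/pubsub/`):

```typescript
// Minimal sketch: publishing an event to Cloud Pub/Sub.
// Assumes GOOGLE_APPLICATION_CREDENTIALS and TOPIC_NAME are set (see .env.example).
import {PubSub} from '@google-cloud/pubsub';

const pubSubClient = new PubSub();

export async function publishEvent(
  topicName: string,
  payload: object
): Promise<string> {
  const dataBuffer = Buffer.from(JSON.stringify(payload));
  // publishMessage resolves with the server-assigned message id
  const messageId = await pubSubClient
    .topic(topicName)
    .publishMessage({data: dataBuffer});
  return messageId;
}

// Example usage (hypothetical event):
// await publishEvent(process.env.TOPIC_NAME!, {event: 'user.created', userId: '123'});
```

Downstream services would consume these messages through a subscription, which is what keeps the individual services loosely coupled.
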
It can run locally with Skaffold and in the cloud on GKE.

To learn more about setting up GKE and running with Skaffold, please refer to this folder:

`./infra`

The application uses Express as its framework and is configured with the following features:

- `ES2022` features enabled
- `Dotenv` to load environment variables from a `.env` file
- `ESLint` code-quality tool
- `Prettier` to prettify the code
- `MongoDB` ready-to-go configuration with Mongoose
- `MySQL` ready-to-go configuration with mysql2
- `CORS` support enabled
- `RBAC` logic to authorize only users with specific roles to use an endpoint
- `Passport` logic to add an authentication layer if needed
- `Sparkpost` email service support
- `Error Handling` custom error middleware and helpers configured globally
- `Multer` file uploading configured for use in routes as middleware
- `Google Cloud Storage` middleware configured to use Google Cloud Storage as the upload bucket
- `Google Cloud Pub/Sub` pub/sub support for event-driven messaging
- `Axios` globally configured in `./src/utils/api.utils.js`
- `Swagger` documentation reachable at `http://localhost:3000/api/v1/docs`
- `Jest` testing support
- `Logger` logging support with Winston
- `Docker`-ready configuration with a multi-stage build
- `Terraform`-ready configuration to instantiate the infrastructure in GCP
- `Agenda` ready to emit events through Agenda jobs
- `Best practices` in naming files

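To give a feel for how these pieces fit together, here is a minimal, hypothetical sketch of an Express + TypeScript entry point wired to the `HOST`, `PORT`, and `SERVICE_NAME` variables from `.env.example`. It is only an illustration, not the template's actual `./src/bin/server.ts`:

```typescript
// Minimal sketch of an Express + TypeScript entry point (illustrative only).
import 'dotenv/config'; // loads variables from .env
import express, {Request, Response} from 'express';
import cors from 'cors';

const app = express();
app.use(cors());
app.use(express.json());

// Hypothetical health-check route
app.get('/api/v1/health', (_req: Request, res: Response) => {
  res.status(200).json({status: 'ok', service: process.env.SERVICE_NAME});
});

const port = Number(process.env.PORT) || 3000;
const host = process.env.HOST || '0.0.0.0';

app.listen(port, host, () => {
  console.log(`Server listening on http://${host}:${port}`);
});
```
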
## Basic Information

- App entry point is located in `./src/index.ts`

- Server config entrypoint is located in `./src/bin/server.ts`

- Prettier config is located at `./.prettierrc.js`

- Eslint config is located at `./.eslintrc`

- Sparkpost service support is located at `./src/services/email/sparkport.service.ts`

  - You can define your own email services in this file

- Mongo config is located at `./src/config/mongodb.config.ts`

- MySQL config is located at `./src/config/mysql.config.ts`

- Error handling middleware is located at `./src/middlewares/errorHandler.middleware.ts`

  - You can configure as many errors as you need in `./src/errors/`

- Multer middleware is located at `./src/middlewares/upload.middleware.ts`

  - If you want to use Google Cloud Storage as the upload bucket, follow the instructions at `./src/config/gcloud/README.md`

- RBAC logic middleware is located at `./src/middlewares/verifyApiRights.middleware.ts` (a rough sketch of this kind of middleware follows this list)

- Swagger config file is located at `./src/api/swagger/swagger.route.js`

- Swagger routes are defined in `./src/api/swagger/swagger.route.ts`

- Docker config is located at `./Dockerfile`

- Pub/Sub service is located at `./src/services/pubsub/pub-sub.service.js`

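Here is a rough sketch of what a role-based access control middleware such as `verifyApiRights.middleware.ts` can look like. It is illustrative only, not the file's actual contents; the `req.user` shape and the role names are assumptions:

```typescript
// Illustrative RBAC middleware sketch - not the actual verifyApiRights.middleware.ts.
// Assumes an authentication layer (e.g. Passport) has already attached a `user`
// with a `role` to the request.
import {Request, Response, NextFunction} from 'express';

interface AuthenticatedRequest extends Request {
  user?: {id: string; role: string};
}

export const verifyApiRights =
  (...allowedRoles: string[]) =>
  (req: AuthenticatedRequest, res: Response, next: NextFunction) => {
    if (!req.user) {
      return res.status(401).json({message: 'Not authenticated'});
    }
    if (!allowedRoles.includes(req.user.role)) {
      return res.status(403).json({message: 'Insufficient permissions'});
    }
    return next();
  };

// Usage in a route (hypothetical):
// router.delete('/users/:id', verifyApiRights('admin'), userController.deleteUser);
```
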
## Folder Structure

> `infra/`
>
> - **For more information about the k8s configuration please check the README file**
> - **`k8s`** - folder contains all production kubernetes manifests
> - **`k8s-dev`** - folder contains all development kubernetes manifests to run with skaffold
> - **`scripts`** - folder contains all scripts related to the creation of a cluster, running skaffold, or secret creation
>
> `src/`
>
> - **`api/`** - contains all api logic with model, services, controller and routes
> - **`bin/`** - server configuration folder
> - **`config/`** - this folder contains all the config files (database, passport, etc...)
> - **`constants/`** - this folder contains all the global constants
> - **`logs/`** - the logger file will be stored here
> - **`helpers/`** - some helper functions, i.e. an error helper that returns json every time an error comes in (see the sketch after this list)
> - **`middlewares/`** - here you can find all the custom middlewares
> - **`services/`** - here we store all the services; i.e. here we define methods to manipulate a db model entity
> - **`tests/`** - here we store all the jest tests
> - **`utils/`** - contains some utility functions to be reused in the code (i.e. axios global configuration)

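A minimal sketch of the kind of global error helper and middleware described above (illustrative only; the template's real implementation lives in `./src/middlewares/errorHandler.middleware.ts` and `./src/errors/`):

```typescript
// Illustrative error-handling sketch - not the template's actual errorHandler.middleware.ts.
import {Request, Response, NextFunction} from 'express';

// Hypothetical base class for application errors carrying an HTTP status code
export class ApplicationError extends Error {
  constructor(message: string, public statusCode = 500) {
    super(message);
    this.name = this.constructor.name;
  }
}

// Express identifies error middleware by its four-argument signature
export const errorHandler = (
  err: Error,
  _req: Request,
  res: Response,
  _next: NextFunction
) => {
  const statusCode = err instanceof ApplicationError ? err.statusCode : 500;
  res.status(statusCode).json({
    error: err.name,
    message: err.message,
  });
};

// Registered last, after all routes: app.use(errorHandler);
```
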

## Getting Started

Copy the `.env.example` file to `.env` and be sure to fill in all the global variables. Alternatively, you can use the `generate-env.sh` script in the scripts folder: it will generate a `.env.test.local` file that you can copy to `.env`.

```bash
cp env.example .env
```

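Once the variables are filled in, the application reads them from `process.env`. A minimal sketch of loading and sanity-checking a few of them with `dotenv` (illustrative; the variable names follow `.env.example`, the `config` object is hypothetical):

```typescript
// Illustrative sketch: load .env and fail fast if required variables are missing.
import dotenv from 'dotenv';

dotenv.config();

const requiredVars = ['MONGO_URI', 'JWT_KEY', 'GOOGLE_PROJECT_ID'] as const;

for (const name of requiredVars) {
  if (!process.env[name]) {
    throw new Error(`Missing required environment variable: ${name}`);
  }
}

export const config = {
  host: process.env.HOST || '0.0.0.0',
  port: Number(process.env.PORT) || 3000,
  mongoUri: process.env.MONGO_URI as string,
};
```
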
Then replace:

1. `MONGO_URI` string with your Mongo connection
2. `MONGO_URI_TEST` string with your Mongo test connection
3. `MYSQL_HOST_STAGE` string with your MySQL host name
   - `MYSQL_USER_STAGE` string with your MySQL username
   - `MYSQL_PASSWORD_STAGE` string with your MySQL password
   - `MYSQL_DB_STAGE` string with your MySQL db name
   - `MYSQL_SOCKET_STAGE` string with your MySQL socket name
4. `GOOGLE_APPLICATION_CREDENTIALS` path with yours
5. `GOOGLE_PROJECT_ID` with yours
6. `SENDGRID_API_KEY` with yours
7. `SENDGRID_SENDER_EMAIL` with yours

For Google Cloud Storage to work, follow the instructions located in `./src/config/gcloud/README.md`

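For reference, here is a minimal sketch of how a service can talk to Google Cloud Storage once `GOOGLE_APPLICATION_CREDENTIALS` and `GOOGLE_STORAGE_BUCKET_NAME` are set. It is illustrative only; the `uploadBuffer` helper is hypothetical and not part of the template's upload middleware:

```typescript
// Illustrative sketch: upload a file buffer to a Google Cloud Storage bucket.
// The client picks up credentials from GOOGLE_APPLICATION_CREDENTIALS automatically.
import {Storage} from '@google-cloud/storage';

const storage = new Storage();

export async function uploadBuffer(
  buffer: Buffer,
  destination: string
): Promise<string> {
  const bucketName = process.env.GOOGLE_STORAGE_BUCKET_NAME as string;
  const file = storage.bucket(bucketName).file(destination);
  await file.save(buffer, {resumable: false});
  // Return a gs:// style reference to the uploaded object
  return `gs://${bucketName}/${destination}`;
}
```
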
---

To get started with this repo, run `npm install` in the root folder

```bash
npm install
```

To get started with a dev environment, run the command below. Here we use nodemon and babel-node to restart the server as soon as we change something

```bash
npm run start:dev
```

To compile the code and create a production build

```bash
npm run compile
```

This command will create a build in the root directory

To start with a production-ready build you can run this command

```bash
# This sets NODE_ENV to production and, via npm-run-all, creates a build and runs the server command
npm run start
```

If you already have a build and you want to run it directly with node, you can run

```bash
# This command launches the node instance inside ./build/bin/server
npm run server
```

## Docker Ready

### Here we use a multi-stage build to optimize the speed and size of the final image

If you use Docker and want to dockerize the app, you can run the command

```bash
docker build -t <dockerhubusername>/<docker-image-name>:<tag> .
```

then

```bash
docker run --name <docker-process-name> -d -p 3000:3000 <dockerhubusername>/<docker-image-name>:<tag>
```

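Since the template ships with Jest support (see the features list above), here is a minimal, hypothetical test sketch for a health-check route, assuming `jest`, `ts-jest`, and `supertest` are available:

```typescript
// Illustrative Jest test sketch (not one of the template's actual tests).
import request from 'supertest';
import express from 'express';

// A tiny app standing in for the real server exported from ./src/bin/server
const app = express();
app.get('/api/v1/health', (_req, res) => res.status(200).json({status: 'ok'}));

describe('GET /api/v1/health', () => {
  it('responds with status ok', async () => {
    const response = await request(app).get('/api/v1/health');
    expect(response.status).toBe(200);
    expect(response.body.status).toBe('ok');
  });
});
```
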
@ -0,0 +1,54 @@
# INFRASTRUCTURE FOLDER

This folder should be moved to the root folder where all the services are located.

Replace all the key-value pairs with yours.

- `k8s` folder contains all production kubernetes manifests
- `k8s-dev` folder contains all development kubernetes manifests to run with skaffold
- `scripts` folder contains all scripts related to the creation of a cluster, running skaffold, or secret creation

## Skaffold File

For production environment: `./k8s/skaffold.yaml`

For development environment: `./k8s-dev/skaffold.yaml`

Remember to put this file in the root of the multi-service project. Depending on the environment, you should specify the correct skaffold configuration.

- If you use Docker, you should install the NGINX ingress controller following this link:
  [NGINX x Docker](https://kubernetes.github.io/ingress-nginx/deploy/)

## TASKS TO MAKE THIS WORK

1. Create a project in GCP
2. Go to `./scripts/gke-autopilot.sh` and replace the <google-cloud-project-id> with your project id.
3. Launch the script with `chmod +x gke-autopilot.sh && ./gke-autopilot.sh`
4. In case the context has not changed, switch it with `kubectl config use-context <cluster-name>`
5. Put the file `skaffold.yaml` in your root folder where all the services are located.
6. For each YAML file change the `project-id`, `servicename` and all other env variables with yours
7. After you have changed all the configuration files, you can launch skaffold with `skaffold run`

## USEFUL COMMANDS

- Change the kubernetes context

```bash
kubectl config use-context <cluster-name>
```

- Build the container in gcloud with the following command, from the root where the Dockerfile is located

```bash
gcloud builds submit --tag gcr.io/<gcp-project-id>/<image-name> .
```

- CREATE SECRET FROM JSON FILE
  - `google-application-credentials` = the name of the secret to be stored
  - `google-application-credentials.json` = the file name under which the file will be stored in the volume
  - `./google-application-credentials.json` = the actual file downloaded from GCP, located in the config folder

```bash
kubectl create secret generic google-application-credentials --from-file=google-application-credentials.json=./google-application-credentials.json
```

@ -0,0 +1,229 @@
|
|||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: servicename-depl
|
||||
namespace: default
|
||||
labels:
|
||||
app: servicename
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: servicename
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: servicename
|
||||
spec:
|
||||
volumes:
|
||||
- name: google-cloud-keys
|
||||
secret:
|
||||
secretName: google-application-credentials
|
||||
|
||||
- name: proxy-to-another-gcp-project # name of the volumes that contain the proxy to another gcp project
|
||||
secret:
|
||||
secretName: proxy-to-another-gcp-project-secret
|
||||
containers:
|
||||
- name: servicename
|
||||
#Local Configuration
|
||||
image: org_name/project_name/servicename:latest
|
||||
volumeMounts:
|
||||
- name: google-cloud-keys
|
||||
mountPath: /var/secrets/google
|
||||
env:
|
||||
#SERVICE CONFIGURATION
|
||||
- name: HOST
|
||||
value: '0.0.0.0'
|
||||
- name: SERVICE_NAME
|
||||
value: 'your-service-name'
|
||||
- name: PORT
|
||||
value: '3000'
|
||||
- name: HASH
|
||||
value: '10'
|
||||
#JWT CONFIGURATION
|
||||
- name: JWT_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: JWT_KEY
|
||||
- name: SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: SECRET
|
||||
#MONGO CONFIGURATION
|
||||
- name: MONGO_URI
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MONGO_URI_TEST # We use the test one also in mongouri. this happen cause when launch skaffold in local it has node_env production
|
||||
- name: MONGO_URI_TEST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MONGO_URI_TEST
|
||||
#GOOGLE CLOUD CONFIGURATION
|
||||
- name: GOOGLE_APPLICATION_CREDENTIALS
|
||||
value: '/var/secrets/google/google-application-credentials.json'
|
||||
- name: GOOGLE_PROJECT_ID
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: GOOGLE_PROJECT_ID
|
||||
- name: GOOGLE_CLOUD_PROJECT
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: GOOGLE_CLOUD_PROJECT
|
||||
- name: GOOGLE_STORAGE_BUCKET_NAME
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: GOOGLE_STORAGE_BUCKET_NAME
|
||||
- name: GOOGLE_CLIENT_ID
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: GOOGLE_CLIENT_ID
|
||||
- name: GOOGLE_CLIENT_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: GOOGLE_CLIENT_SECRET
|
||||
- name: GOOGLE_MAPS_API_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: GOOGLE_MAPS_API_KEY
|
||||
#SPARKPOST CONFIGURATION
|
||||
- name: SPARKPOST_API_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: SPARKPOST_API_KEY
|
||||
- name: SPARKPOST_SENDER_DOMAIN
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: SPARKPOST_SENDER_DOMAIN
|
||||
#MESSAGEBIRD CONFIGURATION
|
||||
- name: MESSAGEBIRD_ACCESS_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MESSAGEBIRD_ACCESS_KEY
|
||||
- name: MESSAGEBIRD_WHATSAPP_CHANNEL_ID
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MESSAGEBIRD_WHATSAPP_CHANNEL_ID
|
||||
- name: MESSAGEBIRD_TEMPLATE_NAMESPACE_ID
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MESSAGEBIRD_TEMPLATE_NAMESPACE_ID
|
||||
- name: MESSAGEBIRD_TEMPLATE_NAME_TEST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MESSAGEBIRD_TEMPLATE_NAME_TEST
|
||||
#MYSQL CONFIGURATION
|
||||
- name: MYSQL_HOST_STAGE
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MYSQL_HOST_STAGE
|
||||
- name: MYSQL_USER_STAGE
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MYSQL_USER_STAGE
|
||||
- name: MYSQL_PASSWORD_STAGE
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MYSQL_PASSWORD_STAGE
|
||||
- name: MYSQL_DB_STAGE
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MYSQL_DB_STAGE
|
||||
- name: MYSQL_SOCKET_STAGE
|
||||
value: '/cloudsql/your-socket-name'
|
||||
|
||||
- name: MYSQL_HOST_PROD
|
||||
value: '127.0.0.1' #we use localhost because we mounted a cloud proxy sql
|
||||
- name: MYSQL_USER_PROD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MYSQL_USER_PROD
|
||||
- name: MYSQL_PASSWORD_PROD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MYSQL_PASSWORD_PROD
|
||||
- name: MYSQL_DB_PROD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MYSQL_DB_PROD
|
||||
- name: MYSQL_SOCKET_PROD
|
||||
value: '/cloudsql/your-cloudsql-socket'
|
||||
|
||||
- name: cloud-sql-proxy
|
||||
# It is recommended to use the latest version of the Cloud SQL proxy
|
||||
# Make sure to update on a regular schedule!
|
||||
image: gcr.io/cloud-sql-connectors/cloud-sql-proxy:2.1.0
|
||||
args:
|
||||
# If connecting from a VPC-native GKE cluster, you can use the
|
||||
# following flag to have the proxy connect over private IP
|
||||
# - "--private-ip"
|
||||
|
||||
# Enable structured logging with LogEntry format:
|
||||
- "--structured-logs"
|
||||
|
||||
# Defaults: MySQL: 3306, Postgres: 5432, SQLServer: 1433
|
||||
# Replace DB_PORT with the port the proxy should listen on
|
||||
- "--port=3306"
|
||||
- "cloud-sql-instances=instance-name"
|
||||
|
||||
# [START cloud_sql_proxy_k8s_volume_mount]
|
||||
# This flag specifies where the service account key can be found
|
||||
- '--credentials-file=/var/secrets/google/proxy-to-another-gcp-project.json'
|
||||
securityContext:
|
||||
# The default Cloud SQL proxy image runs as the
|
||||
# "nonroot" user and group (uid: 65532) by default.
|
||||
runAsNonRoot: true
|
||||
volumeMounts:
|
||||
- name: proxy-to-another-gcp-project
|
||||
mountPath: /var/secrets/google
|
||||
readOnly: true
|
||||
# [END cloud_sql_proxy_k8s_volume_mount]
|
||||
# Resource configuration depends on an application's requirements. You
|
||||
# should adjust the following values based on what your application
|
||||
# needs. For details, see https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/
|
||||
resources:
|
||||
requests:
|
||||
# The proxy's memory use scales linearly with the number of active
|
||||
# connections. Fewer open connections will use less memory. Adjust
|
||||
# this value based on your application's requirements.
|
||||
memory: '2Gi'
|
||||
# The proxy's CPU use scales linearly with the amount of IO between
|
||||
# the database and the application. Adjust this value based on your
|
||||
# application's requirements.
|
||||
cpu: '1'
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: servicename-srv
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: servicename
|
||||
ports:
|
||||
- name: servicename
|
||||
protocol: TCP
|
||||
port: 3000
|
||||
targetPort: 3000
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: ingress-service
|
||||
annotations:
|
||||
#Local configuration - Remember to install nginx
|
||||
kubernetes.io/ingress.class: nginx
|
||||
nginx.ingress.kubernetes.io/use-regex: 'true'
|
||||
nginx.ingress.kubernetes.io/enable-cors: 'true'
|
||||
nginx.ingress.kubernetes.io/cors-allow-methods: 'GET, HEAD, PUT, PATCH, POST, DELETE, OPTIONS'
|
||||
nginx.ingress.kubernetes.io/cors-allow-origin: 'http://localhost:3000'
|
||||
nginx.ingress.kubernetes.io/cors-allow-credentials: 'true'
|
||||
nginx.ingress.kubernetes.io/proxy-body-size: 8m
|
||||
spec:
|
||||
rules:
|
||||
- host: testrestapi.eu.ngrok.io
|
||||
http:
|
||||
paths:
|
||||
# Client implementation of React or a frontend client in general that doesn't have api versioning
|
||||
- path: /?(.*)
|
||||
pathType: ImplementationSpecific
|
||||
backend:
|
||||
service:
|
||||
name: clientservicename-srv
|
||||
port:
|
||||
number: 3000
|
||||
|
||||
- path: /api/v1/service-1-name/?(.*)
|
||||
pathType: ImplementationSpecific
|
||||
backend:
|
||||
service:
|
||||
name: servicename-srv
|
||||
port:
|
||||
number: 3000
|
||||
|
||||
- path: /api/v1/service-2-name/?(.*)
|
||||
pathType: ImplementationSpecific
|
||||
backend:
|
||||
service:
|
||||
name: servicename2-srv
|
||||
port:
|
||||
number: 3000
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: google-application-credentials #name of the secret to be mounted
|
||||
type: Opaque
|
||||
stringData: #file name that will be created to mount
|
||||
google-application-credentials.json: |
|
||||
{
|
||||
"type": "service_account",
|
||||
"project_id": "your-project-id",
|
||||
"private_key_id": "your-private-key-id",
|
||||
"private_key": "your-private-key",
|
||||
"client_email": "service-account-email",
|
||||
"client_id": "your-client-id",
|
||||
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
||||
"token_uri": "https://oauth2.googleapis.com/token",
|
||||
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
||||
"client_x509_cert_url": "client_x509_cert_url"
|
||||
}
|
||||
|
||||
#The same result can be achieved by using this kubectl command in the folder where google-application-credentials.json is
|
||||
#kubectl create secret generic google-application-credentials --from-file=google-application-credentials.json=./google-application-credentials.json
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: proxy-to-another-gcp-project-secret #name of the secret to be mounted
|
||||
type: Opaque
|
||||
stringData: #file name that will be created to mount
|
||||
proxy-to-another-gcp-project.json: |
|
||||
{
|
||||
"type": "service_account",
|
||||
"project_id": "your-project-id",
|
||||
"private_key_id": "your-private-key-id",
|
||||
"private_key": "your-private-key",
|
||||
"client_email": "service-account-email",
|
||||
"client_id": "your-client-id",
|
||||
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
||||
"token_uri": "https://oauth2.googleapis.com/token",
|
||||
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
||||
"client_x509_cert_url": "client_x509_cert_url"
|
||||
}
|
||||
|
||||
#The same result can be achieved by using this kubectl command in the folder where google-application-credentials.json is
|
||||
#kubectl create secret generic google-application-credentials --from-file=google-application-credentials.json=./google-application-credentials.json
|
||||
|
|
@ -0,0 +1,49 @@
|
|||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: shared-secrets
|
||||
data:
|
||||
#JWT CONFIGURATION
|
||||
JWT_KEY: <base64-base64-value>
|
||||
SECRET: <base64-base64-value>
|
||||
|
||||
#MONGODB CONFIGURATION:
|
||||
MONGO_URI: <base64-value>
|
||||
MONGO_URI_TEST: <base64-value>
|
||||
|
||||
#GOOGLE CLOUD CONFIGURATION
|
||||
GOOGLE_PROJECT_ID: <base64-value>
|
||||
GOOGLE_CLOUD_PROJECT: <base64-value>
|
||||
GOOGLE_STORAGE_BUCKET_NAME: <base64-value>
|
||||
GOOGLE_CLIENT_ID: <base64-value>
|
||||
GOOGLE_CLIENT_SECRET: <base64-value>
|
||||
GOOGLE_MAPS_API_KEY: <base64-value>
|
||||
|
||||
#SPARKPOST CONFIGURATION
|
||||
SPARKPOST_API_KEY: <base64-value> #Use test key here
|
||||
SPARKPOST_SENDER_DOMAIN: <base64-value>
|
||||
|
||||
# MESSAGEBIRD CONFIGURATION
|
||||
MESSAGEBIRD_ACCESS_KEY: <base64-value> #Use test key here
|
||||
MESSAGEBIRD_WHATSAPP_CHANNEL_ID: <base64-value>
|
||||
MESSAGEBIRD_TEMPLATE_NAMESPACE_ID: <base64-value>
|
||||
MESSAGEBIRD_TEMPLATE_NAME_TEST: <base64-value>
|
||||
|
||||
#MYSQL CONFIGURATION SECRECTS
|
||||
MYSQL_HOST_STAGE: <base64-value>
|
||||
MYSQL_USER_STAGE: <base64-value>
|
||||
MYSQL_PASSWORD_STAGE: <base64-value>
|
||||
MYSQL_DB_STAGE: <base64-value>
|
||||
MYSQL_SOCKET_STAGE: <base64-value> #not necessary
|
||||
|
||||
MYSQL_HOST_PROD: <base64-value>
|
||||
MYSQL_USER_PROD: <base64-value>
|
||||
MYSQL_PASSWORD_PROD: <base64-value>
|
||||
MYSQL_DB_PROD: <base64-value>
|
||||
MYSQL_SOCKET_PROD: <base64-value> #not necessary
|
||||
|
||||
|
||||
#kubectl create secret generic jwt-secret --from-literal=JWT_KEY=JWT_SECRET
|
||||
|
||||
#Don't forget to create the google-application-credentials secret with
|
||||
#kubectl create secret generic google-application-credentials --from-file=google-application-credentials.json=./google-application-credentials.json
|
||||
|
|
@ -0,0 +1,47 @@
|
|||
apiVersion: skaffold/v4beta1
|
||||
kind: Config
|
||||
metadata:
|
||||
name: project-id #project id
|
||||
build:
|
||||
artifacts:
|
||||
#Local configuration
|
||||
# Client context of React
|
||||
- image: org_name/project_name/client-servicename
|
||||
context: client-service-folder
|
||||
sync:
|
||||
manual:
|
||||
- src: ./src/**/*.ts
|
||||
dest: .
|
||||
- src: "***/*.html"
|
||||
dest: .
|
||||
- src: "***/*.css"
|
||||
dest: .
|
||||
docker:
|
||||
dockerfile: Dockerfile
|
||||
# Service 1 context
|
||||
- image: org_name/project_name/servicename
|
||||
context: service-folder #folder where codebase is stored
|
||||
sync:
|
||||
manual:
|
||||
- src: src/**/*.ts
|
||||
dest: .
|
||||
docker:
|
||||
dockerfile: Dockerfile
|
||||
- image: org_name/project_name/servicename2
|
||||
context: service2-folder
|
||||
sync:
|
||||
manual:
|
||||
- src: src/**/*.ts
|
||||
dest: .
|
||||
docker:
|
||||
dockerfile: Dockerfile
|
||||
tagPolicy:
|
||||
sha256: {} #this tag policy uses the tag latest of image
|
||||
#Local configuration
|
||||
local:
|
||||
push: false
|
||||
manifests:
|
||||
rawYaml:
|
||||
- ./infra/k8s-dev/*
|
||||
deploy:
|
||||
kubectl: {}
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
apiVersion: networking.gke.io/v1
|
||||
kind: ManagedCertificate
|
||||
metadata:
|
||||
name: project-id-certificate
|
||||
spec:
|
||||
domains:
|
||||
- domainname.com
|
||||
- api.domainname.com
|
||||
|
|
@ -0,0 +1,262 @@
|
|||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: servicename-depl
|
||||
namespace: default
|
||||
labels:
|
||||
app: servicename
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: servicename
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: servicename
|
||||
spec:
|
||||
volumes:
|
||||
- name: google-cloud-keys
|
||||
secret:
|
||||
secretName: google-application-credentials
|
||||
|
||||
- name: proxy-to-another-gcp-project # name of the volumes that contain the proxy to another gcp project
|
||||
secret:
|
||||
secretName: proxy-to-another-gcp-project-secret
|
||||
containers:
|
||||
- name: servicename
|
||||
#Cloud Configuration
|
||||
image: europe-west1-docker.pkg.dev/your-artifact-repository/servicename:latest
|
||||
imagePullPolicy: Always
|
||||
# Liveness Probe Configuration
|
||||
livenessProbe:
|
||||
failureThreshold: 3
|
||||
httpGet:
|
||||
path: /api/v1/servicename/
|
||||
port: 3000
|
||||
scheme: HTTP
|
||||
initialDelaySeconds: 60
|
||||
periodSeconds: 60
|
||||
successThreshold: 1
|
||||
timeoutSeconds: 10
|
||||
# Readiness Probe Configuration
|
||||
readinessProbe:
|
||||
failureThreshold: 3
|
||||
httpGet:
|
||||
path: /api/v1/servicename/
|
||||
port: 3000
|
||||
scheme: HTTP
|
||||
initialDelaySeconds: 60
|
||||
periodSeconds: 60
|
||||
successThreshold: 1
|
||||
timeoutSeconds: 10
|
||||
|
||||
volumeMounts:
|
||||
- name: google-cloud-keys
|
||||
mountPath: /var/secrets/google
|
||||
ports:
|
||||
- containerPort: 3000
|
||||
env:
|
||||
#SERVICE CONFIGURATION
|
||||
- name: HOST
|
||||
value: '0.0.0.0'
|
||||
- name: SERVICE_NAME
|
||||
value: 'your_service_name'
|
||||
- name: PORT
|
||||
value: '3000'
|
||||
- name: HASH
|
||||
value: '10'
|
||||
|
||||
#JWT CONFIGURATION
|
||||
- name: JWT_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: JWT_KEY
|
||||
- name: SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: SECRET
|
||||
|
||||
#MONGO CONFIGURATION
|
||||
- name: MONGO_URI
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MONGO_URI # We use the test one also in mongouri. this happen cause when launch skaffold in local it has node_env production
|
||||
- name: MONGO_URI_TEST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MONGO_URI_TEST
|
||||
|
||||
#GOOGLE CLOUD CONFIGURATION
|
||||
- name: GOOGLE_APPLICATION_CREDENTIALS
|
||||
value: '/var/secrets/google/google-application-credentials.json'
|
||||
- name: GOOGLE_PROJECT_ID
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: GOOGLE_PROJECT_ID
|
||||
- name: GOOGLE_CLOUD_PROJECT
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: GOOGLE_CLOUD_PROJECT
|
||||
- name: GOOGLE_STORAGE_BUCKET_NAME
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: GOOGLE_STORAGE_BUCKET_NAME
|
||||
- name: GOOGLE_CLIENT_ID
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: GOOGLE_CLIENT_ID
|
||||
- name: GOOGLE_CLIENT_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: GOOGLE_CLIENT_SECRET
|
||||
- name: GOOGLE_MAPS_API_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: GOOGLE_MAPS_API_KEY
|
||||
|
||||
#SPARKPOST CONFIGURATION
|
||||
- name: SPARKPOST_API_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: SPARKPOST_API_KEY
|
||||
- name: SPARKPOST_SENDER_DOMAIN
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: SPARKPOST_SENDER_DOMAIN
|
||||
|
||||
#MESSAGEBIRD CONFIGURATION
|
||||
- name: MESSAGEBIRD_ACCESS_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MESSAGEBIRD_ACCESS_KEY
|
||||
- name: MESSAGEBIRD_WHATSAPP_CHANNEL_ID
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MESSAGEBIRD_WHATSAPP_CHANNEL_ID
|
||||
- name: MESSAGEBIRD_TEMPLATE_NAMESPACE_ID
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MESSAGEBIRD_TEMPLATE_NAMESPACE_ID
|
||||
- name: MESSAGEBIRD_TEMPLATE_NAME_TEST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MESSAGEBIRD_TEMPLATE_NAME_TEST
|
||||
|
||||
#MYSQL CONFIGURATION
|
||||
- name: MYSQL_HOST_STAGE
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MYSQL_HOST_STAGE
|
||||
- name: MYSQL_USER_STAGE
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MYSQL_USER_STAGE
|
||||
- name: MYSQL_PASSWORD_STAGE
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MYSQL_PASSWORD_STAGE
|
||||
- name: MYSQL_DB_STAGE
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MYSQL_DB_STAGE
|
||||
- name: MYSQL_SOCKET_STAGE
|
||||
value: '/cloudsql/your-socket-name'
|
||||
|
||||
- name: MYSQL_HOST_PROD
|
||||
value: '127.0.0.1' #we use localhost because we mounted a cloud proxy sql
|
||||
- name: MYSQL_USER_PROD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MYSQL_USER_PROD
|
||||
- name: MYSQL_PASSWORD_PROD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MYSQL_PASSWORD_PROD
|
||||
- name: MYSQL_DB_PROD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: shared-secrets
|
||||
key: MYSQL_DB_PROD
|
||||
- name: MYSQL_SOCKET_PROD
|
||||
value: '/cloudsql/your-socket-name'
|
||||
|
||||
- name: cloud-sql-proxy
|
||||
# It is recommended to use the latest version of the Cloud SQL proxy
|
||||
# Make sure to update on a regular schedule!
|
||||
image: gcr.io/cloud-sql-connectors/cloud-sql-proxy:2.1.0
|
||||
args:
|
||||
# If connecting from a VPC-native GKE cluster, you can use the
|
||||
# following flag to have the proxy connect over private IP
|
||||
# - "--private-ip"
|
||||
|
||||
# Enable structured logging with LogEntry format:
|
||||
- '--structured-logs'
|
||||
|
||||
# Defaults: MySQL: 3306, Postgres: 5432, SQLServer: 1433
|
||||
# Replace DB_PORT with the port the proxy should listen on
|
||||
- '--port=3306'
|
||||
- 'cloud-sql-instances=instance-name'
|
||||
|
||||
# [START cloud_sql_proxy_k8s_volume_mount]
|
||||
# This flag specifies where the service account key can be found
|
||||
- '--credentials-file=/var/secrets/google/proxy-to-another-gcp-project.json'
|
||||
securityContext:
|
||||
# The default Cloud SQL proxy image runs as the
|
||||
# "nonroot" user and group (uid: 65532) by default.
|
||||
runAsNonRoot: true
|
||||
volumeMounts:
|
||||
- name: proxy-to-another-gcp-project
|
||||
mountPath: /var/secrets/google
|
||||
readOnly: true
|
||||
# [END cloud_sql_proxy_k8s_volume_mount]
|
||||
# Resource configuration depends on an application's requirements. You
|
||||
# should adjust the following values based on what your application
|
||||
# needs. For details, see https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/
|
||||
resources:
|
||||
requests:
|
||||
# The proxy's memory use scales linearly with the number of active
|
||||
# connections. Fewer open connections will use less memory. Adjust
|
||||
# this value based on your application's requirements.
|
||||
memory: '2Gi'
|
||||
# The proxy's CPU use scales linearly with the amount of IO between
|
||||
# the database and the application. Adjust this value based on your
|
||||
# application's requirements.
|
||||
cpu: '1'
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: servicename-srv
|
||||
spec:
|
||||
# type: ClusterIP
|
||||
type: NodePort
|
||||
selector:
|
||||
app: servicename
|
||||
ports:
|
||||
- name: servicename
|
||||
protocol: TCP
|
||||
port: 3000
|
||||
targetPort: 3000
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: ingress-service
|
||||
annotations:
|
||||
#Cloud Configuration
|
||||
kubernetes.io/ingress.class: gce
|
||||
kubernetes.io/ingress.global-static-ip-name: project-id-static-ip
|
||||
networking.gke.io/managed-certificates: project-id-certificate
|
||||
spec:
|
||||
rules:
|
||||
- host: domainname.com
|
||||
http:
|
||||
paths:
|
||||
- path: /*
|
||||
pathType: ImplementationSpecific
|
||||
backend:
|
||||
service:
|
||||
name: clientservicename-srv
|
||||
port:
|
||||
number: 3000
|
||||
|
||||
- path: /api/v1/servicename/*
|
||||
pathType: ImplementationSpecific
|
||||
backend:
|
||||
service:
|
||||
name: servicename-srv
|
||||
port:
|
||||
number: 3000
|
||||
|
||||
- path: /api/v1/servicename2/*
|
||||
pathType: ImplementationSpecific
|
||||
backend:
|
||||
service:
|
||||
name: servicename2-srv
|
||||
port:
|
||||
number: 3000
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: google-application-credentials #name of the secret to be mounted
|
||||
type: Opaque
|
||||
stringData: #file name that will be created to mount
|
||||
google-application-credentials.json: |
|
||||
{
|
||||
"type": "service_account",
|
||||
"project_id": "your-project-id",
|
||||
"private_key_id": "your-private-key-id",
|
||||
"private_key": "your-private-key",
|
||||
"client_email": "service-account-email",
|
||||
"client_id": "your-client-id",
|
||||
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
||||
"token_uri": "https://oauth2.googleapis.com/token",
|
||||
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
||||
"client_x509_cert_url": "client_x509_cert_url"
|
||||
}
|
||||
|
||||
#The same result can be achieved by using this kubectl command in the folder where google-application-credentials.json is
|
||||
#kubectl create secret generic google-application-credentials --from-file=google-application-credentials.json=./google-application-credentials.json
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: proxy-to-another-gcp-project-secret #name of the secret to be mounted
|
||||
type: Opaque
|
||||
stringData: #file name that will be created to mount
|
||||
proxy-to-another-gcp-project.json: |
|
||||
{
|
||||
"type": "service_account",
|
||||
"project_id": "your-project-id",
|
||||
"private_key_id": "your-private-key-id",
|
||||
"private_key": "your-private-key",
|
||||
"client_email": "service-account-email",
|
||||
"client_id": "your-client-id",
|
||||
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
||||
"token_uri": "https://oauth2.googleapis.com/token",
|
||||
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
||||
"client_x509_cert_url": "client_x509_cert_url"
|
||||
}
|
||||
|
||||
#The same result can be achieved by using this kubectl command in the folder where google-application-credentials.json is
|
||||
#kubectl create secret generic google-application-credentials --from-file=google-application-credentials.json=./google-application-credentials.json
|
||||
|
|
@ -0,0 +1,49 @@
|
|||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: shared-secrets
|
||||
data:
|
||||
#JWT CONFIGURATION
|
||||
JWT_KEY: <base64-base64-value>
|
||||
SECRET: <base64-base64-value>
|
||||
|
||||
#MONGODB CONFIGURATION:
|
||||
MONGO_URI: <base64-value>
|
||||
MONGO_URI_TEST: <base64-value>
|
||||
|
||||
#GOOGLE CLOUD CONFIGURATION
|
||||
GOOGLE_PROJECT_ID: <base64-value>
|
||||
GOOGLE_CLOUD_PROJECT: <base64-value>
|
||||
GOOGLE_STORAGE_BUCKET_NAME: <base64-value>
|
||||
GOOGLE_CLIENT_ID: <base64-value>
|
||||
GOOGLE_CLIENT_SECRET: <base64-value>
|
||||
GOOGLE_MAPS_API_KEY: <base64-value>
|
||||
|
||||
#SPARKPOST CONFIGURATION
|
||||
SPARKPOST_API_KEY: <base64-value> #Use test key here
|
||||
SPARKPOST_SENDER_DOMAIN: <base64-value>
|
||||
|
||||
# MESSAGEBIRD CONFIGURATION
|
||||
MESSAGEBIRD_ACCESS_KEY: <base64-value> #Use test key here
|
||||
MESSAGEBIRD_WHATSAPP_CHANNEL_ID: <base64-value>
|
||||
MESSAGEBIRD_TEMPLATE_NAMESPACE_ID: <base64-value>
|
||||
MESSAGEBIRD_TEMPLATE_NAME_TEST: <base64-value>
|
||||
|
||||
#MYSQL CONFIGURATION SECRECTS
|
||||
MYSQL_HOST_STAGE: <base64-value>
|
||||
MYSQL_USER_STAGE: <base64-value>
|
||||
MYSQL_PASSWORD_STAGE: <base64-value>
|
||||
MYSQL_DB_STAGE: <base64-value>
|
||||
MYSQL_SOCKET_STAGE: <base64-value> #not necessary
|
||||
|
||||
MYSQL_HOST_PROD: <base64-value>
|
||||
MYSQL_USER_PROD: <base64-value>
|
||||
MYSQL_PASSWORD_PROD: <base64-value>
|
||||
MYSQL_DB_PROD: <base64-value>
|
||||
MYSQL_SOCKET_PROD: <base64-value> #not necessary
|
||||
|
||||
|
||||
#kubectl create secret generic jwt-secret --from-literal=JWT_KEY=JWT_SECRET
|
||||
|
||||
#Don't forget to create the google-application-credentials secret with
|
||||
#kubectl create secret generic google-application-credentials --from-file=google-application-credentials.json=./google-application-credentials.json
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
# SKAFFOLD CONFIGURATION FOR PRODUCTION
|
||||
apiVersion: skaffold/v4beta1
|
||||
kind: Config
|
||||
metadata:
|
||||
name: project-id #project id
|
||||
build:
|
||||
artifacts:
|
||||
# Client context of React
|
||||
- image: europe-west1-docker.pkg.dev/your_artifact_url/client-servicename
|
||||
context: client-service
|
||||
sync:
|
||||
manual:
|
||||
- src: ./src/**/*.ts
|
||||
dest: .
|
||||
- src: "***/*.html"
|
||||
dest: .
|
||||
- src: "***/*.css"
|
||||
dest: .
|
||||
docker:
|
||||
dockerfile: Dockerfile
|
||||
- image: europe-west1-docker.pkg.dev/your_artifact_url/servicename
|
||||
context: service-folder
|
||||
sync:
|
||||
manual:
|
||||
- src: ./src/**/*.ts
|
||||
dest: .
|
||||
docker:
|
||||
dockerfile: Dockerfile
|
||||
tagPolicy:
|
||||
sha256: {}
|
||||
googleCloudBuild:
|
||||
projectId: your-google-cloud-project-id
|
||||
manifests:
|
||||
rawYaml:
|
||||
- ./infra/k8s/*
|
||||
deploy:
|
||||
kubectl: {}
|
||||
|
|
@ -0,0 +1,36 @@
|
|||
# Local .terraform directories
|
||||
**/.terraform/*
|
||||
.terraform/
|
||||
|
||||
|
||||
# .tfstate files
|
||||
*.tfstate
|
||||
*.tfstate.*
|
||||
|
||||
# Crash log files
|
||||
crash.log
|
||||
crash.*.log
|
||||
|
||||
# Exclude all .tfvars files, which are likely to contain sensitive data, such as
|
||||
# password, private keys, and other secrets. These should not be part of version
|
||||
# control as they are data points which are potentially sensitive and subject
|
||||
# to change depending on the environment.
|
||||
*.tfvars
|
||||
*.tfvars.json
|
||||
|
||||
# Ignore override files as they are usually used to override resources locally and so
|
||||
# are not checked in
|
||||
override.tf
|
||||
override.tf.json
|
||||
*_override.tf
|
||||
*_override.tf.json
|
||||
|
||||
# Include override files you do wish to add to version control using negated pattern
|
||||
# !example_override.tf
|
||||
|
||||
# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
|
||||
# example: *tfplan*
|
||||
|
||||
# Ignore CLI configuration files
|
||||
.terraformrc
|
||||
terraform.rc
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
# This file is maintained automatically by "terraform init".
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.terraform.io/hashicorp/google" {
|
||||
version = "4.55.0"
|
||||
constraints = "4.55.0"
|
||||
hashes = [
|
||||
"h1:GMfPJSl9+PS3tHmHmUMo/4CkJ9/tHvZwV2aVp050Fcc=",
|
||||
"zh:0a82a76dc4bbe05418075f88830f73ad3ca9d56d83a172faaf3306b016219d52",
|
||||
"zh:367e3c0ce96ab8f9ec3e1fab5a4f9a48b3b5b336622b36b828f75bf6fb663001",
|
||||
"zh:51fd41c7508c4c39830e5c2885bc053e90d5d24fc90462235b69394185b7fa1d",
|
||||
"zh:7ebe62261c522631d22ab06951d0d6a1bf629b98aea5d9fe2e2e50ca256cf395",
|
||||
"zh:9dd119eca735471d61fe9e4cc45e8c257275e2e9f4da30fba7296fc7ae8de99e",
|
||||
"zh:a4426a0d24dcf8b3899e17530fabb3fb5791ff7db65404c26e66b031a8422bd2",
|
||||
"zh:c1e93a786b6d014610c3f83fda12b3044009947f729b2042635fa66d9f387c47",
|
||||
"zh:ea0703ee2f5e3732077e946cfa5cdd85119ef4ecc898a2affdeef9de9f92fe4e",
|
||||
"zh:ecada51dd406f46e9fce7dafb0b8ef3a671b8d572dbc1d39d9fdc137029f5275",
|
||||
"zh:effb91791080a86ff130b517bce5253aed1372ad2c6f9cfb252375a196b9f730",
|
||||
"zh:f1885b811a31e37d53bd780d2485c19754ee2db0a66affeb5e788aa9b1950b8c",
|
||||
"zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c",
|
||||
]
|
||||
}
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
REMEMBER TO ADD A FILE CALLED `terraform.tfvars` with the following options:
|
||||
|
||||
|
||||
|
||||
```
|
||||
region = "europe-west1"
|
||||
zone = "europe-west1-b"
|
||||
location = "EU"
|
||||
project = "development-test-skeldon"
|
||||
environment = "prod"
|
||||
app_name = "test-rest-api-app"
|
||||
```
|
||||
|
||||
Then run terraform commands as usual.
|
||||
|
||||
## Typical Terraform commands
|
||||
|
||||
```bash
|
||||
terraform init # only the first time
|
||||
terraform fmt # to format the code
|
||||
terraform validate # to validate the code
|
||||
terraform plan # to see what will be created
|
||||
terraform apply # to create the infrastructure
|
||||
```
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
resource "google_container_cluster" "app_cluster" {
|
||||
name = "${var.app_name}-cluster-${var.environment}"
|
||||
location = var.region
|
||||
ip_allocation_policy {
|
||||
}
|
||||
enable_autopilot = true
|
||||
}
|
||||
|
||||
resource "google_compute_global_address" "external_static_ip" {
|
||||
name = "${var.app_name}-ingress-static-ip"
|
||||
address_type = "EXTERNAL"
|
||||
ip_version = "IPV4"
|
||||
project = var.project
|
||||
description = "External static IP address for app"
|
||||
}
|
||||
|
||||
output "external_static_ip" {
|
||||
value = google_compute_global_address.external_static_ip.address
|
||||
description = "External static IP address for app"
|
||||
}
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
terraform {
|
||||
required_providers {
|
||||
google = {
|
||||
source = "hashicorp/google"
|
||||
version = "4.55.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
provider "google" {
|
||||
project = var.project
|
||||
region = var.region
|
||||
zone = var.zone
|
||||
}
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
resource "google_artifact_registry_repository" "repo" {
|
||||
location = "europe-west1"
|
||||
repository_id = "${var.app_name}-artifact-repository"
|
||||
description = "Artifact repository created by Terraform"
|
||||
format = "DOCKER"
|
||||
}
|
||||
|
||||
output "artifact_registry_name" {
|
||||
value = google_artifact_registry_repository.repo.name
|
||||
description = "Artifact registry name"
|
||||
}
|
||||
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
resource "google_storage_bucket" "prod-bucket" {
|
||||
name = "${var.app_name}-bucket-${var.environment}"
|
||||
location = var.region
|
||||
project = var.project
|
||||
storage_class = "STANDARD"
|
||||
uniform_bucket_level_access = false
|
||||
# versioning {
|
||||
# enabled = true
|
||||
# }
|
||||
# lifecycle_rule {
|
||||
# action {
|
||||
# type = "Delete"
|
||||
# storage_class = "NEARLINE"
|
||||
# }
|
||||
# condition {
|
||||
# age = 30
|
||||
# }
|
||||
# }
|
||||
}
|
||||
|
||||
output "prod_bucket_name" {
|
||||
value = google_storage_bucket.prod-bucket.name
|
||||
description = "Prod Bucket name"
|
||||
}
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
variable "region" {}
|
||||
variable "zone" {}
|
||||
variable "location" {}
|
||||
variable "project" {}
|
||||
variable "environment" {}
|
||||
variable "app_name" {}
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
/** @type {import('ts-jest').JestConfigWithTsJest} */
|
||||
module.exports = {
|
||||
preset: 'ts-jest',
|
||||
testEnvironment: 'node',
|
||||
clearMocks: true,
|
||||
// roots: ['<rootDir>/src'],
|
||||
collectCoverage: true,
|
||||
collectCoverageFrom: ['src/**/([a-zA-Z_]*).{js,ts}', '!**/*.test.{js,ts}'],
|
||||
};
|
||||
File diff suppressed because it is too large
|
|
@ -0,0 +1,103 @@
|
|||
{
|
||||
"name": "typescript-rest-api-backend",
|
||||
"version": "1.0.0",
|
||||
"description": "Express Typescript Rest API backend template with full TS support following gts style guide and gke integration",
|
||||
"main": "src/index.ts",
|
||||
"scripts": {
|
||||
"start:dev": "cross-env NODE_ENV=development nodemon ./src/bin/server",
|
||||
"start:prod": "npm run prod",
|
||||
"prod": "cross-env NODE_ENV=production npm-run-all compile server",
|
||||
"server": "node ./build/src/bin/server",
|
||||
"lint": "gts lint",
|
||||
"clean": "gts clean",
|
||||
"compile": "npm run clean && tsc",
|
||||
"watch": "tsc -w",
|
||||
"fix": "gts fix",
|
||||
"prepare": "npm run compile",
|
||||
"pretest": "npm run compile",
|
||||
"posttest": "npm run lint",
|
||||
"test": "cross-env NODE_ENV=test jest --verbose",
|
||||
"docs": "rm -rf docs/ && typedoc",
|
||||
"generate:env": "sh ./scripts/generate-env.sh"
|
||||
},
|
||||
"author": "Giuseppe Albrizio",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"@types/bcryptjs": "^2.4.2",
|
||||
"@types/compression": "^1.7.2",
|
||||
"@types/cookie-parser": "^1.4.3",
|
||||
"@types/cors": "^2.8.13",
|
||||
"@types/debug": "^4.1.8",
|
||||
"@types/express": "^4.17.17",
|
||||
"@types/express-session": "^1.17.7",
|
||||
"@types/jest": "^29.5.3",
|
||||
"@types/jsonwebtoken": "^9.0.2",
|
||||
"@types/lodash": "^4.14.195",
|
||||
"@types/multer": "^1.4.7",
|
||||
"@types/node": "^20.4.1",
|
||||
"@types/passport": "^1.0.12",
|
||||
"@types/passport-google-oauth20": "^2.0.11",
|
||||
"@types/passport-local": "^1.0.35",
|
||||
"@types/pdfmake": "^0.2.2",
|
||||
"@types/sparkpost": "^2.1.5",
|
||||
"@types/supertest": "^2.0.12",
|
||||
"@types/swagger-ui-express": "^4.1.3",
|
||||
"@types/validator": "^13.7.17",
|
||||
"cross-env": "^7.0.3",
|
||||
"gts": "^3.1.1",
|
||||
"jest": "^29.6.1",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"rimraf": "^5.0.1",
|
||||
"supertest": "^6.3.3",
|
||||
"ts-jest": "^29.1.1",
|
||||
"ts-node": "^10.9.1",
|
||||
"typedoc": "^0.24.8",
|
||||
"typescript": "~5.1.6"
|
||||
},
|
||||
"dependencies": {
|
||||
"@google-cloud/pubsub": "^3.7.1",
|
||||
"@google-cloud/storage": "^6.11.0",
|
||||
"@googlemaps/google-maps-services-js": "^3.3.33",
|
||||
"@hokify/agenda": "^6.3.0",
|
||||
"@types/morgan": "^1.9.4",
|
||||
"axios": "^1.4.0",
|
||||
"bcryptjs": "^2.4.3",
|
||||
"clean-deep": "^3.4.0",
|
||||
"compression": "^1.7.4",
|
||||
"connect-mongo": "^5.0.0",
|
||||
"cookie-parser": "^1.4.6",
|
||||
"cors": "^2.8.5",
|
||||
"crypto": "^1.0.1",
|
||||
"crypto-random-string": "^5.0.0",
|
||||
"debug": "^4.3.4",
|
||||
"dotenv": "^16.3.1",
|
||||
"express": "^4.18.2",
|
||||
"express-mongo-sanitize": "^2.2.0",
|
||||
"express-rate-limit": "^6.7.1",
|
||||
"express-session": "^1.17.3",
|
||||
"firebase-admin": "^11.9.0",
|
||||
"helmet": "^7.0.0",
|
||||
"http": "^0.0.1-security",
|
||||
"jsonwebtoken": "^9.0.1",
|
||||
"lodash": "^4.17.21",
|
||||
"messagebird": "^4.0.1",
|
||||
"mongodb": "^5.7.0",
|
||||
"mongoose": "^7.3.3",
|
||||
"morgan": "^1.10.0",
|
||||
"multer": "^1.4.5-lts.1",
|
||||
"multer-cloud-storage": "^3.0.0",
|
||||
"mysql2": "^3.5.1",
|
||||
"nodemon": "^3.0.1",
|
||||
"passport": "^0.6.0",
|
||||
"passport-google-oauth20": "^2.0.0",
|
||||
"passport-local": "^1.0.0",
|
||||
"pdfmake": "^0.2.7",
|
||||
"slugify": "^1.6.6",
|
||||
"sparkpost": "^2.1.4",
|
||||
"swagger-ui-express": "^5.0.0",
|
||||
"validator": "^13.9.0",
|
||||
"winston": "^3.10.0",
|
||||
"xmlbuilder2": "^3.1.1",
|
||||
"xss-clean": "^0.1.1"
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
<html>
|
||||
<head>
|
||||
<title>Express</title>
|
||||
<link
|
||||
rel="stylesheet"
|
||||
type="text/css"
|
||||
href="public/stylesheets/style.css"
|
||||
/>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<h1>Express</h1>
|
||||
<p>Welcome to Express Typescript Rest API</p>
|
||||
<p>Go to http://localhost:3000/api/v1 to test your API endpoint</p>
|
||||
</body>
|
||||
</html>
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
body {
|
||||
padding: 50px;
|
||||
font: 14px "Lucida Grande", Helvetica, Arial, sans-serif;
|
||||
}
|
||||
|
||||
a {
|
||||
color: #00B7FF;
|
||||
}
|
||||
|
|
@ -0,0 +1,191 @@
|
|||
#!/bin/sh
|
||||
|
||||
# Define color codes
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
CYAN='\033[0;36m'
|
||||
YELLOW='\033[0;33m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
# Check if .env file exists and delete
|
||||
echo "${YELLOW}Deleting old .env file...${NC}"
|
||||
rm -f ./.env.test.local
|
||||
|
||||
# Greet user
|
||||
echo "${CYAN}Hello! Let's set up your environment variables.${NC}"
|
||||
|
||||
|
||||
# Ask user for variable content and validate input
|
||||
while true; do
|
||||
read -p "What is the HOST? [0.0.0.0] " HOST
|
||||
# HOST=${HOST:-0.0.0.0} # set default value for PORT
|
||||
if [ -z "$HOST" ]; then
|
||||
echo "${RED}HOST cannot be blank. Please enter a value.${NC}"
|
||||
else
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
# Ask user for variable content and validate input
|
||||
while true; do
|
||||
read -p "What is the port you want to run the server on? [3000] " PORT
|
||||
# PORT=${PORT:-3000} # set default value for PORT
|
||||
if [ -z "$PORT" ]; then
|
||||
echo "${RED}PORT cannot be blank. Please enter a value.${NC}"
|
||||
else
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
while true; do
|
||||
read -p "What is the name of the service? " SERVICE_NAME
|
||||
if [ -z "$SERVICE_NAME" ]; then
|
||||
echo "${RED}SERVICE_NAME cannot be blank. Please enter a value.${NC}"
|
||||
else
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
while true; do
|
||||
read -p "What is your JWT_KEY? " JWT_KEY
|
||||
if [ -z "$JWT_KEY" ]; then
|
||||
echo "${RED}JWT_KEY cannot be blank. Please enter a value.${NC}"
|
||||
else
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
while true; do
|
||||
read -p "What is your SECRET? " SECRET
|
||||
if [ -z "$SECRET" ]; then
|
||||
echo "${RED}SECRET cannot be blank. Please enter a value.${NC}"
|
||||
else
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
HASH=10 # set default value for HASH
|
||||
|
||||
read -p "What is the path to your Google Application Credentials file? [./src/config/gcloud/google-application-credentials.json] " GOOGLE_APPLICATION_CREDENTIALS
|
||||
GOOGLE_APPLICATION_CREDENTIALS=${GOOGLE_APPLICATION_CREDENTIALS:-./src/config/gcloud/google-application-credentials.json} # set default value for GOOGLE_APPLICATION_CREDENTIALS
|
||||
|
||||
read -p "What is your Google Cloud project ID? " GOOGLE_PROJECT_ID
|
||||
read -p "What is your Google Cloud Storage bucket name? " GOOGLE_STORAGE_BUCKET_NAME
|
||||
read -p "What is your Google Client ID? " GOOGLE_CLIENT_ID
|
||||
read -p "What is your Google Client Secret? " GOOGLE_CLIENT_SECRET
|
||||
read -p "What is your Google Maps API key? " GOOGLE_MAPS_API_KEY
|
||||
|
||||
read -p "What is your CLIENT_URL? [http://localhost:3000] " CLIENT_URL
|
||||
CLIENT_URL=${CLIENT_URL:-http://localhost:3000} # set default value for CLIENT_URL
|
||||
|
||||
read -p "What is your MongoDB URI? [mongodb://localhost:27017/database_name] " MONGO_URI
|
||||
MONGO_URI=${MONGO_URI:-mongodb://localhost:27017/database_name} # set default value for MONGO_URI
|
||||
|
||||
read -p "What is your MongoDB test URI? [mongodb://localhost:27017/test_database_name] " MONGO_URI_TEST
|
||||
MONGO_URI_TEST=${MONGO_URI_TEST:-mongodb://localhost:27017/test_database_name} # set default value for MONGO_URI_TEST
|
||||
|
||||
read -p "What is your MongoDB username? " MONGO_USER
|
||||
MONGO_USER=${MONGO_USER:-'your_mongo_user'} # set default value for MONGO_USER and add single quotes around the value
|
||||
|
||||
read -p "What is your MongoDB password? " MONGO_PASS
|
||||
MONGO_PASS=${MONGO_PASS:-'your_mongo_password'} # set default value for MONGO_PASS and add single quotes around the value
|
||||
|
||||
read -p "What is your MySQL staging host? " MYSQL_HOST_STAGE
|
||||
MYSQL_HOST_STAGE=${MYSQL_HOST_STAGE:-'your_myql_host_stage'} # set default value for MYSQL_HOST_STAGE and add single quotes around the value
|
||||
|
||||
read -p "What is your MySQL staging user? " MYSQL_USER_STAGE
|
||||
MYSQL_USER_STAGE=${MYSQL_USER_STAGE:-'your_myql_user'} # set default value for MYSQL_USER_STAGE and add single quotes around the value
|
||||
|
||||
read -p "What is your MySQL staging password? " MYSQL_PASSWORD_STAGE
|
||||
MYSQL_PASSWORD_STAGE=${MYSQL_PASSWORD_STAGE:-'your_myql_pass'} # set default value for MYSQL_PASSWORD_STAGE and add single quotes around the value
|
||||
|
||||
read -p "What is your MySQL staging database? " MYSQL_DB_STAGE
|
||||
MYSQL_DB_STAGE=${MYSQL_DB_STAGE:-'your_myql_db_name'} # set default value for MYSQL_DB_STAGE and add single quotes around the value
|
||||
|
||||
read -p "What is your MySQL staging socket? " MYSQL_SOCKET_STAGE
|
||||
MYSQL_SOCKET_STAGE=${MYSQL_SOCKET_STAGE:-'/your/socket-cloud-sql'} # set default value for MYSQL_SOCKET_STAGE and add single quotes around the value
|
||||
|
||||
read -p "What is your MySQL production host? " MYSQL_HOST_PROD
|
||||
MYSQL_HOST_PROD=${MYSQL_HOST_PROD:-'your_myql_host_stage'} # set default value for MYSQL_HOST_PROD and add single quotes around the value
|
||||
|
||||
read -p "What is your MySQL production user? " MYSQL_USER_PROD
|
||||
MYSQL_USER_PROD=${MYSQL_USER_PROD:-'your_myql_user'} # set default value for MYSQL_USER_PROD and add single quotes around the value
|
||||
|
||||
read -p "What is your MySQL production password? " MYSQL_PASSWORD_PROD
|
||||
MYSQL_PASSWORD_PROD=${MYSQL_PASSWORD_PROD:-'your_myql_pass'} # set default value for MYSQL_PASSWORD_PROD and add single quotes around the value
|
||||
|
||||
read -p "What is your MySQL production database? " MYSQL_DB_PROD
|
||||
MYSQL_DB_PROD=${MYSQL_DB_PROD:-'your_myql_db_name'} # set default value for MYSQL_DB_PROD and add single quotes around the value
|
||||
|
||||
read -p "What is your MySQL production socket? " MYSQL_SOCKET_PROD
|
||||
MYSQL_SOCKET_PROD=${MYSQL_SOCKET_PROD:-'/your/socket-cloud-sql'} # set default value for MYSQL_SOCKET_PROD and add single quotes around the value
|
||||
|
||||
read -p "What is your SparkPost API key? " SPARKPOST_API_KEY
|
||||
SPARKPOST_API_KEY=${SPARKPOST_API_KEY:-'your_sparkpost_api_key'} # set default value for SPARKPOST_API_KEY and add single quotes around the value
|
||||
|
||||
read -p "What is your SparkPost sender domain? " SPARKPOST_SENDER_DOMAIN
|
||||
SPARKPOST_SENDER_DOMAIN=${SPARKPOST_SENDER_DOMAIN:-'your_sparkpost_sender_domain'} # set default value for SPARKPOST_SENDER_DOMAIN and add single quotes around the value
|
||||
|
||||
read -p "What is your MessageBird Access Key? " MESSAGEBIRD_ACCESS_KEY
|
||||
MESSAGEBIRD_ACCESS_KEY=${MESSAGEBIRD_ACCESS_KEY:-'your_messagbird_access_key'} # set default value for MESSAGEBIRD_ACCESS_KEY and add single quotes around the value
|
||||
|
||||
read -p "What is your MessageBird WhatsApp Channel ID? " MESSAGEBIRD_WHATSAPP_CHANNEL_ID
|
||||
MESSAGEBIRD_WHATSAPP_CHANNEL_ID=${MESSAGEBIRD_WHATSAPP_CHANNEL_ID:-'your_messagebird_whatsapp_channel_id'} # set default value for MESSAGEBIRD_WHATSAPP_CHANNEL_ID and add single quotes around the value
|
||||
|
||||
read -p "What is your MessageBird Template Namespace ID? " MESSAGEBIRD_TEMPLATE_NAMESPACE_ID
|
||||
MESSAGEBIRD_TEMPLATE_NAMESPACE_ID=${MESSAGEBIRD_TEMPLATE_NAMESPACE_ID:-'your_messagebird_template_namespace_id'} # set default value for MESSAGEBIRD_TEMPLATE_NAMESPACE_ID and add single quotes around the value
|
||||
|
||||
# Write variables to the .env.test.local file in the current working directory
|
||||
echo "# SERVER CONFIGURATION" >> ./.env.test.local
|
||||
echo "HOST=${HOST}" >> ./.env.test.local
|
||||
echo "PORT=${PORT}" >> ./.env.test.local
|
||||
echo "SERVICE_NAME='${SERVICE_NAME}'" >> ./.env.test.local
|
||||
|
||||
echo "# JWT CONFIGURATION" >> ./.env.test.local
|
||||
echo "JWT_KEY='${JWT_KEY}'" >> ./.env.test.local
|
||||
echo "SECRET='${SECRET}'" >> ./.env.test.local
|
||||
echo "HASH=${HASH}" >> ./.env.test.local
|
||||
|
||||
echo "# MONGO DB CONFIGURATION" >> ./.env.test.local
|
||||
echo "MONGO_URI='${MONGO_URI}'" >> ./.env.test.local
|
||||
echo "MONGO_URI_TEST='${MONGO_URI_TEST}'" >> ./.env.test.local
|
||||
echo "MONGO_USER='${MONGO_USER}'" >> ./.env.test.local
|
||||
echo "MONGO_PASS='${MONGO_PASS}'" >> ./.env.test.local
|
||||
|
||||
echo "# GOOGLE CLOUD CONFIGURATION" >> ./.env.test.local
|
||||
echo "GOOGLE_APPLICATION_CREDENTIALS='${GOOGLE_APPLICATION_CREDENTIALS}'" >> ./.env.test.local
|
||||
echo "GOOGLE_PROJECT_ID='${GOOGLE_PROJECT_ID}'" >> ./.env.test.local
|
||||
echo "GOOGLE_STORAGE_BUCKET_NAME='${GOOGLE_STORAGE_BUCKET_NAME}'" >> ./.env.test.local
|
||||
echo "GOOGLE_CLIENT_ID='${GOOGLE_CLIENT_ID}'" >> ./.env.test.local
|
||||
echo "GOOGLE_CLIENT_SECRET='${GOOGLE_CLIENT_SECRET}'" >> ./.env.test.local
|
||||
echo "GOOGLE_MAPS_API_KEY='${GOOGLE_MAPS_API_KEY}'" >> ./.env.test.local
|
||||
|
||||
echo "# CLIENT CONFIGURATION" >> ./.env.test.local
|
||||
echo "CLIENT_URL='${CLIENT_URL}'" >> ./.env.test.local
|
||||
|
||||
echo "# MYSQL CONFIGURATION DEVELOPMENT" >> ./.env.test.local
|
||||
echo "MYSQL_HOST_STAGE='${MYSQL_HOST_STAGE}'" >> ./.env.test.local
|
||||
echo "MYSQL_USER_STAGE='${MYSQL_USER_STAGE}'" >> ./.env.test.local
|
||||
echo "MYSQL_PASSWORD_STAGE='${MYSQL_PASSWORD_STAGE}'" >> ./.env.test.local
|
||||
echo "MYSQL_DB_STAGE='${MYSQL_DB_STAGE}'" >> ./.env.test.local
|
||||
echo "MYSQL_SOCKET_STAGE='${MYSQL_SOCKET_STAGE}'" >> ./.env.test.local
|
||||
|
||||
echo "# MYSQL CONFIGURATION PRODUCTION" >> ./.env.test.local
|
||||
echo "MYSQL_HOST_PROD='${MYSQL_HOST_PROD}'" >> ./.env.test.local
|
||||
echo "MYSQL_USER_PROD='${MYSQL_USER_PROD}'" >> ./.env.test.local
|
||||
echo "MYSQL_PASSWORD_PROD='${MYSQL_PASSWORD_PROD}'" >> ./.env.test.local
|
||||
echo "MYSQL_DB_PROD='${MYSQL_DB_PROD}'" >> ./.env.test.local
|
||||
echo "MYSQL_SOCKET_PROD='${MYSQL_SOCKET_PROD}'" >> ./.env.test.local
|
||||
|
||||
echo "# SPARKPOST CONFIGURATION" >> ./.env.test.local
|
||||
echo "SPARKPOST_API_KEY='${SPARKPOST_API_KEY}'" >> ./.env.test.local
|
||||
echo "SPARKPOST_SENDER_DOMAIN='${SPARKPOST_SENDER_DOMAIN}'" >> ./.env.test.local
|
||||
|
||||
echo "# MESSAGEBIRD CONFIGURATION" >> ./.env.test.local
|
||||
echo "MESSAGEBIRD_ACCESS_KEY='${MESSAGEBIRD_ACCESS_KEY}'" >> ./.env.test.local
|
||||
echo "MESSAGEBIRD_WHATSAPP_CHANNEL_ID='${MESSAGEBIRD_WHATSAPP_CHANNEL_ID}'" >> ./.env.test.local
|
||||
echo "MESSAGEBIRD_TEMPLATE_NAMESPACE_ID='${MESSAGEBIRD_TEMPLATE_NAMESPACE_ID}'" >> ./.env.test.local
|
||||
|
||||
# Success message
|
||||
echo "${GREEN}Your environment variables have been written to ./.env.test.local. Thank you for using this script!${NC}"
|
||||
echo "${GREEN}Please make sure to copy the .evn.test.local file to .env before going to production.${NC}"
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
#!/bin/bash
|
||||
|
||||
# Check kubectl context
|
||||
CURRENT_CONTEXT=$(kubectl config current-context)
|
||||
if [ "$CURRENT_CONTEXT" != "docker-desktop" ]; then
|
||||
echo "Please set kubectl context to docker-desktop before running this script."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Set the options
|
||||
NO_PRUNE=false
|
||||
CACHE_ARTIFACTS=false
|
||||
|
||||
# Parse the options
|
||||
while getopts ":npca" opt; do
|
||||
case $opt in
|
||||
n) NO_PRUNE=true ;;
|
||||
p) NO_PRUNE=false ;;
|
||||
c) CACHE_ARTIFACTS=true ;;
|
||||
a) CACHE_ARTIFACTS=false ;;
|
||||
?) echo "Invalid option: -$OPTARG" >&2 ;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Run the skaffold dev command with the options
|
||||
skaffold dev --no-prune=$NO_PRUNE --cache-artifacts=$CACHE_ARTIFACTS
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
/**
|
||||
* Roles are used to define the access rights of a user.
|
||||
 * We use a custom middleware to check whether the user has the right to access a route.
|
||||
 * The middleware is located in the middleware folder (verifyApiRights.middleware.ts).
|
||||
*/
|
||||
export interface IApiRoles {
|
||||
superAdmin: string[];
|
||||
admin: string[];
|
||||
employee: string[];
|
||||
client: string[];
|
||||
vendor: string[];
|
||||
user: string[];
|
||||
}
|
||||
|
||||
const roles: IApiRoles = {
|
||||
superAdmin: ['*', 'getUsers', 'createUsers', 'manageUsers', 'deleteUsers'],
|
||||
admin: ['getUsers', 'createUsers', 'manageUsers', 'deleteUsers'],
|
||||
employee: ['getUsers'],
|
||||
client: ['getUsers'],
|
||||
vendor: ['getUsers'],
|
||||
user: ['getUsers'],
|
||||
};
|
||||
|
||||
export const apiRoles = Object.keys(roles);
|
||||
|
||||
export const apiRolesRights = new Map(Object.entries(roles));
|
||||
|
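The comment above points at a `verifyApiRights.middleware.ts` that is not included in this commit. The sketch below shows how such a middleware could consume `apiRolesRights`; the import paths, the `requiredRights` signature, and the shape of `req.currentUser` are assumptions made for illustration, not the template's actual implementation.

```typescript
// Hypothetical sketch only: verifyApiRights.middleware.ts is referenced above
// but not part of this commit, so paths and parameter names are assumptions.
import {NextFunction, Response} from 'express';
import {ICustomExpressRequest} from '../../../middlewares/currentUser.middleware';
import {apiRolesRights} from '../config/roles.config';

export const verifyApiRights =
  (...requiredRights: string[]) =>
  (req: ICustomExpressRequest, res: Response, next: NextFunction) => {
    // Assumes the currentUser middleware attaches a role to the request.
    const role = (req.currentUser as {role?: string} | undefined)?.role ?? 'user';
    const userRights = apiRolesRights.get(role) ?? [];

    // '*' grants everything; otherwise every required right must be present.
    const allowed =
      userRights.includes('*') ||
      requiredRights.every(right => userRights.includes(right));

    if (!allowed) {
      return res.status(403).json({
        status: 'error',
        error: {message: 'You do not have permission to perform this action'},
      });
    }
    return next();
  };
```

A route could then be wired as `appRouter.get('/users', requireAuthenticationMiddleware, verifyApiRights('getUsers'), handler)`.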
|
@ -0,0 +1,62 @@
|
|||
# API App Readme
|
||||
|
||||
This is a sample controller and route for an Express app created for testing purposes.
|
||||
|
||||
## Getting Started
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Node.js installed on your local machine
|
||||
- An understanding of the basics of Node.js and Express
|
||||
|
||||
### Installing
|
||||
|
||||
1. Clone the repository
|
||||
2. Install dependencies by running `npm install`
|
||||
3. Start the server by running `npm run start:dev`
|
||||
4. Access the app at `http://localhost:3000/api/v1` using the routes listed under Usage below
|
||||
|
||||
## Usage
|
||||
|
||||
The app has the following endpoints:
|
||||
|
||||
- `/test-route-protection`: A protected route to check if the user is authenticated
|
||||
- `/test-check-authenticated-user`: A route to check the authenticated user
|
||||
- `/test-pubsub-publish`: A route to publish a message to a Google PubSub topic
|
||||
- `/test-pubsub-pull-subscription`: A route to receive a message from a Google PubSub pull subscription
|
||||
- `/test-pubsub-push-subscription`: A route to receive a message from a Google PubSub push subscription
|
||||
- `/test-pdf-make`: A route to generate a PDF
|
||||
|
||||
To use the endpoints, send a request to the respective endpoint using a tool like Postman.
|
||||
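As an alternative to Postman, the routes can also be exercised from a small TypeScript script. This is a sketch under a few assumptions: the server runs locally on port 3000, login accepts an email/password JSON body, and authentication travels in the `jwt` cookie (the cookie name is inferred from the logout handler); none of this is documented in the commit itself.

```typescript
// Sketch: log in, then call a protected test route re-using the auth cookies.
// Requires Node 18+ for the global fetch API.
const BASE_URL = 'http://localhost:3000/api/v1';

async function main() {
  const loginRes = await fetch(`${BASE_URL}/auth/login`, {
    method: 'POST',
    headers: {'Content-Type': 'application/json'},
    body: JSON.stringify({email: 'user@example.com', password: 'password123'}),
  });

  // Forward whatever cookies the login response set (jwt and session cookie).
  // Note: Set-Cookie values are joined with commas here, which is good enough
  // for a quick local test but is not a robust cookie jar.
  const cookie = loginRes.headers.get('set-cookie') ?? '';

  const testRes = await fetch(`${BASE_URL}/app/test-route-protection`, {
    headers: {cookie},
  });
  console.log(await testRes.json());
}

main().catch(console.error);
```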
|
||||
## Controller Functions
|
||||
|
||||
The app has the following controller functions:
|
||||
|
||||
### `checkRouteProtection`
|
||||
|
||||
A function to check if the user is authenticated and the test is completed.
|
||||
|
||||
### `checkUserLogged`
|
||||
|
||||
A function to check the authenticated user.
|
||||
|
||||
### `checkPubSubPublish`
|
||||
|
||||
A function to publish a message to a Google PubSub topic.
|
||||
|
||||
### `checkPubSubPullSubscription`
|
||||
|
||||
A function to receive a message from a Google PubSub pull subscription.
|
||||
|
||||
### `checkPubsubPushSubscription`
|
||||
|
||||
A function to receive a message from a Google PubSub push subscription.
|
||||
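For the push route specifically, Pub/Sub delivers the published message base64-encoded in `message.data`, which is what the controller decodes. The sketch below fakes such a delivery locally; the payload field names follow the standard Pub/Sub push format, and the auth cookie is the same assumption as in the earlier snippet.

```typescript
// Sketch: simulate a Pub/Sub push delivery against the local endpoint.
// The route sits behind requireAuthenticationMiddleware, so a valid jwt
// cookie (obtained as in the previous snippet) is assumed to be available.
async function fakePushDelivery(cookie: string) {
  const payload = {
    message: {
      // Pub/Sub push bodies carry the published message as base64 data.
      data: Buffer.from(
        JSON.stringify({test: 'test', message: 'hello from a fake push'})
      ).toString('base64'),
      messageId: '1234567890',
    },
    subscription: 'projects/your-project/subscriptions/test-push-subscription',
  };

  await fetch('http://localhost:3000/api/v1/app/test-pubsub-push-subscription', {
    method: 'POST',
    headers: {'Content-Type': 'application/json', cookie},
    body: JSON.stringify(payload),
  });
}
```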
|
||||
### `checkPDFMake`
|
||||
|
||||
A function to generate a PDF.
|
||||
|
||||
## Acknowledgments
|
||||
|
||||
This app was created for testing purposes only.
|
||||
|
|
@ -0,0 +1,325 @@
|
|||
import {Response} from 'express';
|
||||
import {CustomError} from '../../../errors/CustomError.error';
|
||||
import {ICustomExpressRequest} from '../../../middlewares/currentUser.middleware';
|
||||
|
||||
import Logger from '../../../lib/logger';
|
||||
import {
|
||||
publishMessageToPubSubTopic,
|
||||
listenForPubSubPullSubscription,
|
||||
PubSubPublishError,
|
||||
} from '../../../services/google-pub-sub/pubsub.service';
|
||||
import {generatePDF, IPDFObject} from '../../../services/pdf/pdf.service';
|
||||
import {generateXML, IXMLObject} from '../../../services/xml/xml.service';
|
||||
import {
|
||||
IFirebaseMessage,
|
||||
sendMulticastFirebaseMessage,
|
||||
sendSingleFirebaseMessage,
|
||||
} from '../../../services/messaging/firebase.service';
|
||||
|
||||
/**
|
||||
* Test controller - Protected router test
|
||||
* @param req - Custom request object
|
||||
* @param res - Response object
|
||||
*/
|
||||
const checkRouteProtection = (
|
||||
req: ICustomExpressRequest,
|
||||
res: Response
|
||||
): void => {
|
||||
res.status(200).json({
|
||||
status: 'success',
|
||||
data: {
|
||||
message: 'Yes you are authenticated and the test is completed',
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Test controller - Check authenticated user
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
const checkUserLogged = async (req: ICustomExpressRequest, res: Response) => {
|
||||
try {
|
||||
res.status(200).json({
|
||||
status: 'success',
|
||||
message: 'User logged retrieved',
|
||||
userInPassport: req?.user,
|
||||
userInSession: req?.session,
|
||||
userInCustomMiddleware: req.currentUser,
|
||||
});
|
||||
} catch (error) {
|
||||
Logger.debug(error);
|
||||
if (error instanceof CustomError) {
|
||||
throw new CustomError(error.statusCode, error.message);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Test controller - Check PubSub publish message to a topic
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
const checkPubSubPublish = async (
|
||||
req: ICustomExpressRequest,
|
||||
res: Response
|
||||
) => {
|
||||
try {
|
||||
const message = await publishMessageToPubSubTopic(
|
||||
{test: 'test', message: 'this is a message'},
|
||||
'test'
|
||||
);
|
||||
|
||||
res.status(200).json({
|
||||
status: 'success',
|
||||
message: 'Message published to PubSub',
|
||||
response: {messageId: message},
|
||||
});
|
||||
} catch (error) {
|
||||
if (error instanceof PubSubPublishError) {
|
||||
res.status(error.statusCode).json({
|
||||
status: 'error',
|
||||
message: error.message,
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
status: 'error',
|
||||
message: 'Failed to publish message to PubSub. Reason not known',
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Test controller - Check PubSub message from a pull subscription
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
const checkPubSubPullSubscription = async (
|
||||
req: ICustomExpressRequest,
|
||||
res: Response
|
||||
) => {
|
||||
try {
|
||||
const response = await listenForPubSubPullSubscription(
|
||||
'test-pull-subscription',
|
||||
10
|
||||
);
|
||||
|
||||
res.status(200).json({
|
||||
status: 'success',
|
||||
message: 'Message received from PubSub Pull Subscription',
|
||||
response,
|
||||
});
|
||||
} catch (error) {
|
||||
if (error instanceof PubSubPublishError) {
|
||||
res.status(error.statusCode).json({
|
||||
status: 'error',
|
||||
message: error.message,
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
status: 'error',
|
||||
message: 'Failed to listen for pull message. Reason not known',
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Test controller - Check PubSub message from a push subscription
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
const checkPubsubPushSubscription = async (
|
||||
req: ICustomExpressRequest,
|
||||
res: Response
|
||||
) => {
|
||||
try {
|
||||
const data = Buffer.from(req.body.message.data, 'base64').toString('utf-8');
|
||||
|
||||
const response = JSON.parse(data);
|
||||
Logger.debug(response);
|
||||
|
||||
res.status(200).send('Message received from PubSub Push Subscription');
|
||||
} catch (error) {
|
||||
Logger.error(error);
|
||||
if (error instanceof CustomError) {
|
||||
res.status(error.statusCode).json({
|
||||
status: 'error',
|
||||
message: error.message,
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
status: 'error',
|
||||
message: 'Failed to listen for push message. Reason not known',
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Test controller - Check PDF generation
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
const checkPDFMake = async (req: ICustomExpressRequest, res: Response) => {
|
||||
try {
|
||||
const body: IPDFObject = {
|
||||
key: 'value',
|
||||
};
|
||||
|
||||
const directory = 'pdfs';
|
||||
|
||||
const response = await generatePDF(body, directory);
|
||||
|
||||
return res.status(200).json({
|
||||
status: 'success',
|
||||
message: 'PDF generated',
|
||||
response,
|
||||
});
|
||||
} catch (error) {
|
||||
Logger.error(error);
|
||||
if (error instanceof CustomError) {
|
||||
res.status(error.statusCode).json({
|
||||
status: 'error',
|
||||
message: error.message,
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
status: 'error',
|
||||
message: `Failed to generate PDF: ${error}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Test controller - Check XML generation
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
const checkXMLBuilder = async (req: ICustomExpressRequest, res: Response) => {
|
||||
try {
|
||||
const body: IXMLObject = {
|
||||
key: 'value',
|
||||
};
|
||||
|
||||
const response = await generateXML(body);
|
||||
|
||||
return res.status(200).json({
|
||||
status: 'success',
|
||||
message: 'XML generated',
|
||||
response,
|
||||
});
|
||||
} catch (error) {
|
||||
Logger.error(error);
|
||||
if (error instanceof CustomError) {
|
||||
res.status(error.statusCode).json({
|
||||
status: 'error',
|
||||
message: error.message,
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
status: 'error',
|
||||
message: `Failed to generate XML: ${error}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Test controller - Check Firebase single notification
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
const checkFirebaseSingleNotification = async (
|
||||
req: ICustomExpressRequest,
|
||||
res: Response
|
||||
) => {
|
||||
try {
|
||||
const {message, userId} = req.body;
|
||||
|
||||
// validate that the message object has the correct interface
|
||||
const validatedMessage: IFirebaseMessage = message;
|
||||
|
||||
const response = await sendSingleFirebaseMessage(validatedMessage, userId);
|
||||
|
||||
res.status(200).json({
|
||||
status: 'success',
|
||||
message: 'Message sent to Firebase',
|
||||
response,
|
||||
});
|
||||
} catch (error) {
|
||||
if (error instanceof CustomError) {
|
||||
res.status(error.statusCode).json({
|
||||
status: 'error',
|
||||
message: error.message,
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
status: 'error',
|
||||
message: 'Failed to send message to Firebase',
|
||||
error,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Test controller - Check Firebase multicast notification
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
const checkFirebaseMulticastNotification = async (
|
||||
req: ICustomExpressRequest,
|
||||
res: Response
|
||||
) => {
|
||||
try {
|
||||
const {message, usersId} = req.body;
|
||||
|
||||
// validate that the message object has the correct interface
|
||||
const validatedMessage: IFirebaseMessage = message;
|
||||
|
||||
if (!Array.isArray(usersId)) {
|
||||
throw new CustomError(400, 'usersId must be an array');
|
||||
}
|
||||
|
||||
const response = await sendMulticastFirebaseMessage(
|
||||
validatedMessage,
|
||||
usersId
|
||||
);
|
||||
|
||||
res.status(200).json({
|
||||
status: response.status,
|
||||
message: response.message,
|
||||
response: response.response,
|
||||
});
|
||||
} catch (error) {
|
||||
if (error instanceof CustomError) {
|
||||
res.status(error.statusCode).json({
|
||||
status: 'error',
|
||||
message: error.message,
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
status: 'error',
|
||||
message: 'Failed to send message to Firebase',
|
||||
error,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export {
|
||||
checkRouteProtection,
|
||||
checkUserLogged,
|
||||
checkPubSubPublish,
|
||||
checkPubSubPullSubscription,
|
||||
checkPubsubPushSubscription,
|
||||
checkPDFMake,
|
||||
checkXMLBuilder,
|
||||
checkFirebaseSingleNotification,
|
||||
checkFirebaseMulticastNotification,
|
||||
};
|
||||
|
|
@ -0,0 +1,73 @@
|
|||
import express from 'express';
|
||||
import catchAsyncHandler from '../../../middlewares/catchAsyncHandler.middleware';
|
||||
|
||||
import {requireAuthenticationMiddleware} from '../../../middlewares/requireAuthentication.middleware';
|
||||
import {
|
||||
checkPubSubPullSubscription,
|
||||
checkPubsubPushSubscription,
|
||||
checkPubSubPublish,
|
||||
checkRouteProtection,
|
||||
checkUserLogged,
|
||||
checkPDFMake,
|
||||
checkXMLBuilder,
|
||||
checkFirebaseSingleNotification,
|
||||
checkFirebaseMulticastNotification,
|
||||
} from './app.controller';
|
||||
|
||||
const appRouter = express.Router();
|
||||
|
||||
appRouter.get(
|
||||
'/test-route-protection',
|
||||
requireAuthenticationMiddleware,
|
||||
catchAsyncHandler(checkRouteProtection)
|
||||
);
|
||||
|
||||
appRouter.get(
|
||||
'/test-check-authenticated-user',
|
||||
requireAuthenticationMiddleware,
|
||||
catchAsyncHandler(checkUserLogged)
|
||||
);
|
||||
|
||||
appRouter.post(
|
||||
'/test-pubsub-publish',
|
||||
requireAuthenticationMiddleware,
|
||||
catchAsyncHandler(checkPubSubPublish)
|
||||
);
|
||||
|
||||
appRouter.post(
|
||||
'/test-pubsub-pull-subscription',
|
||||
requireAuthenticationMiddleware,
|
||||
catchAsyncHandler(checkPubSubPullSubscription)
|
||||
);
|
||||
|
||||
appRouter.post(
|
||||
'/test-pubsub-push-subscription',
|
||||
requireAuthenticationMiddleware,
|
||||
catchAsyncHandler(checkPubsubPushSubscription)
|
||||
);
|
||||
|
||||
appRouter.post(
|
||||
'/test-pdf-make',
|
||||
requireAuthenticationMiddleware,
|
||||
catchAsyncHandler(checkPDFMake)
|
||||
);
|
||||
|
||||
appRouter.post(
|
||||
'/test-xml-builder',
|
||||
requireAuthenticationMiddleware,
|
||||
catchAsyncHandler(checkXMLBuilder)
|
||||
);
|
||||
|
||||
appRouter.post(
|
||||
'/test-firebase-single-message',
|
||||
requireAuthenticationMiddleware,
|
||||
catchAsyncHandler(checkFirebaseSingleNotification)
|
||||
);
|
||||
|
||||
appRouter.post(
|
||||
'/test-firebase-multicast-message',
|
||||
requireAuthenticationMiddleware,
|
||||
catchAsyncHandler(checkFirebaseMulticastNotification)
|
||||
);
|
||||
|
||||
export default appRouter;
|
||||
|
|
@ -0,0 +1,290 @@
|
|||
import {NextFunction, Response} from 'express';
|
||||
import {IVerifyOptions} from 'passport-local';
|
||||
|
||||
import {ICustomExpressRequest} from '../../../middlewares/currentUser.middleware';
|
||||
import createCookieFromToken from '../../../utils/createCookieFromToken.utils';
|
||||
import {CustomError} from '../../../errors';
|
||||
import Logger from '../../../lib/logger';
|
||||
import passport from '../../../config/passport.config';
|
||||
import User, {IUserMethods} from '../user/user.model';
|
||||
import {sendResetPasswordToken} from '../../../services/email/sparkpost.service';
|
||||
|
||||
/**
|
||||
* Signup Local strategy
|
||||
* @param req
|
||||
* @param res
|
||||
* @param next
|
||||
* @returns
|
||||
*/
|
||||
const signup = (
|
||||
req: ICustomExpressRequest,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<void> => {
|
||||
return passport.authenticate(
|
||||
'signup',
|
||||
{session: false},
|
||||
async (err: Error, user: IUserMethods, info: IVerifyOptions) => {
|
||||
try {
|
||||
if (err || !user) {
|
||||
const {message} = info;
|
||||
return res.status(400).json({
|
||||
status: 'error',
|
||||
error: {
|
||||
message,
|
||||
},
|
||||
});
|
||||
}
|
||||
createCookieFromToken(user, 201, req, res);
|
||||
} catch (error) {
|
||||
Logger.error(error);
|
||||
if (error instanceof CustomError) {
|
||||
throw new CustomError(error.statusCode, error.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
)(req, res, next);
|
||||
};
|
||||
|
||||
/**
|
||||
* Login Local strategy
|
||||
* @param req
|
||||
* @param res
|
||||
* @param next
|
||||
*/
|
||||
const login = (
|
||||
req: ICustomExpressRequest,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
) => {
|
||||
passport.authenticate(
|
||||
'login',
|
||||
{session: false},
|
||||
async (err: Error, user: IUserMethods, info: IVerifyOptions) => {
|
||||
try {
|
||||
if (err || !user) {
|
||||
return res.status(401).json({
|
||||
status: 'error',
|
||||
error: {
|
||||
message: info.message,
|
||||
},
|
||||
});
|
||||
}
|
||||
// call req.login manually to set the session and
|
||||
// init passport correctly in serialize & deserialize
|
||||
req.logIn(user, error => {
|
||||
if (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
// generate a signed json web token with the contents of user
|
||||
// object and return it in the response
|
||||
createCookieFromToken(user, 200, req, res);
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
Logger.error(error);
|
||||
if (error instanceof CustomError) {
|
||||
throw new CustomError(error.statusCode, error.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
)(req, res, next);
|
||||
};
|
||||
|
||||
/**
|
||||
* Logout
|
||||
* @param req
|
||||
* @param res
|
||||
* @param next
|
||||
*/
|
||||
const logout = (
|
||||
req: ICustomExpressRequest,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
) => {
|
||||
try {
|
||||
res.clearCookie('jwt');
|
||||
res.clearCookie('connect.sid');
|
||||
req.session.destroy(error => {
|
||||
if (error) {
|
||||
return next(error);
|
||||
}
|
||||
return res.status(200).json({
|
||||
status: 'success',
|
||||
message: 'You have successfully logged out',
|
||||
});
|
||||
});
|
||||
} catch (error) {
|
||||
Logger.error(error);
|
||||
if (error instanceof CustomError) {
|
||||
throw new CustomError(error.statusCode, error.message);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Recover password
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
const recoverPassword = async (req: ICustomExpressRequest, res: Response) => {
|
||||
try {
|
||||
const {email} = req.body;
|
||||
const user = await User.findOne({email}).exec();
|
||||
|
||||
if (!user) {
|
||||
return res.status(404).json({
|
||||
status: 'error',
|
||||
error: {
|
||||
status: 'error',
|
||||
message: 'User not found',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Destroy session and remove any cookie
|
||||
req.session.destroy(() => {
|
||||
res.clearCookie('jwt');
|
||||
});
|
||||
|
||||
res.clearCookie('jwt');
|
||||
|
||||
// Generate and set password reset token
|
||||
user.generatePasswordResetToken();
|
||||
|
||||
// Save the updated user object with a resetPasswordToken and expire
|
||||
await user.save();
|
||||
|
||||
// Send email to the user with the token
|
||||
const sendEmail = await sendResetPasswordToken(
|
||||
user.email as string,
|
||||
user.resetPasswordToken as string
|
||||
);
|
||||
|
||||
res.status(200).json({
|
||||
status: 'success',
|
||||
message: `A reset email has been sent to ${user.email}.`,
|
||||
user: {
|
||||
email: user.email,
|
||||
token: user.resetPasswordToken,
|
||||
},
|
||||
emailStatus: sendEmail,
|
||||
});
|
||||
} catch (error) {
|
||||
Logger.error(error);
|
||||
if (error instanceof CustomError) {
|
||||
throw new CustomError(error.statusCode, error.message);
|
||||
} else {
|
||||
res.status(500).json({
|
||||
status: 'error',
|
||||
message: 'Email could not be sent',
|
||||
error,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Reset password
|
||||
* @param req
|
||||
* @param res
|
||||
* @param next
|
||||
*/
|
||||
const resetPassword = (
|
||||
req: ICustomExpressRequest,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
) => {
|
||||
passport.authenticate(
|
||||
'reset-password',
|
||||
{session: false},
|
||||
async (err: Error, user: IUserMethods, info: IVerifyOptions) => {
|
||||
try {
|
||||
if (err || !user) {
|
||||
return res.status(400).json({
|
||||
status: 'error',
|
||||
error: {
|
||||
message: info.message,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
res.status(200).json({
|
||||
status: 'success',
|
||||
message: 'Password successfully updated',
|
||||
});
|
||||
} catch (error) {
|
||||
Logger.error(error);
|
||||
|
||||
if (error instanceof CustomError) {
|
||||
throw new CustomError(error.statusCode, error.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
)(req, res, next);
|
||||
};
|
||||
|
||||
/**
|
||||
* Return authenticated user
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
const returnUserLogged = async (req: ICustomExpressRequest, res: Response) => {
|
||||
try {
|
||||
if (!req.currentUser) {
|
||||
return res.status(401).json({
|
||||
status: 'error',
|
||||
error: {
|
||||
message:
|
||||
'If you can see this message there is something wrong with authentication',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
const user = await User.findById(req.currentUser?.id);
|
||||
|
||||
res.status(200).json({
|
||||
status: 'success',
|
||||
message: 'User logged retrieved',
|
||||
data: {
|
||||
user,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
Logger.error(error);
|
||||
if (error instanceof CustomError) {
|
||||
throw new CustomError(error.statusCode, error.message);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Google login
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
const googleLogin = async (req: ICustomExpressRequest, res: Response) => {
|
||||
try {
|
||||
const user = req.user as IUserMethods;
|
||||
|
||||
createCookieFromToken(user, 201, req, res);
|
||||
} catch (error) {
|
||||
Logger.debug(error);
|
||||
if (error instanceof CustomError) {
|
||||
throw new CustomError(error.statusCode, error.message);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export {
|
||||
signup,
|
||||
login,
|
||||
logout,
|
||||
recoverPassword,
|
||||
resetPassword,
|
||||
returnUserLogged,
|
||||
googleLogin,
|
||||
};
|
||||
|
|
@ -0,0 +1,61 @@
|
|||
import express from 'express';
|
||||
import passport from '../../../config/passport.config';
|
||||
import {
|
||||
recoverPasswordApiLimiter,
|
||||
resetPasswordApiLimiter,
|
||||
} from '../../../middlewares/apiRateLimit.middleware';
|
||||
import catchAsyncHandler from '../../../middlewares/catchAsyncHandler.middleware';
|
||||
import {requireAuthenticationMiddleware} from '../../../middlewares/requireAuthentication.middleware';
|
||||
|
||||
import {
|
||||
googleLogin,
|
||||
login,
|
||||
logout,
|
||||
recoverPassword,
|
||||
resetPassword,
|
||||
returnUserLogged,
|
||||
signup,
|
||||
} from './auth.controller';
|
||||
|
||||
const authRouter = express.Router();
|
||||
|
||||
authRouter.post('/signup', catchAsyncHandler(signup));
|
||||
authRouter.post('/login', catchAsyncHandler(login));
|
||||
authRouter.post('/logout', catchAsyncHandler(logout));
|
||||
authRouter.post(
|
||||
'/recover-password',
|
||||
recoverPasswordApiLimiter,
|
||||
catchAsyncHandler(recoverPassword)
|
||||
);
|
||||
authRouter.post(
|
||||
'/reset-password',
|
||||
resetPasswordApiLimiter,
|
||||
catchAsyncHandler(resetPassword)
|
||||
);
|
||||
authRouter.get(
|
||||
'/me',
|
||||
requireAuthenticationMiddleware,
|
||||
catchAsyncHandler(returnUserLogged)
|
||||
);
|
||||
|
||||
/**
|
||||
* Social Authentication: Google
|
||||
*/
|
||||
authRouter.get(
|
||||
'/google',
|
||||
passport.authenticate('google', {
|
||||
session: false,
|
||||
scope: ['profile', 'email'],
|
||||
})
|
||||
);
|
||||
// callback route for Google authentication
|
||||
authRouter.get(
|
||||
'/google/callback',
|
||||
passport.authenticate('google', {
|
||||
session: false,
|
||||
scope: ['profile', 'email'],
|
||||
}),
|
||||
googleLogin
|
||||
);
|
||||
|
||||
export default authRouter;
|
||||
|
|
@ -0,0 +1,106 @@
|
|||
import dotenv from 'dotenv';
|
||||
import mongoose, {
|
||||
Types,
|
||||
Document,
|
||||
HydratedDocument,
|
||||
Model,
|
||||
Schema,
|
||||
} from 'mongoose';
|
||||
import {CustomError} from '../../../errors';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
if (!process.env.JWT_KEY) {
|
||||
throw new CustomError(
|
||||
404,
|
||||
'Please provide a JWT_KEY as a global environment variable'
|
||||
);
|
||||
}
|
||||
|
||||
export interface IDatabaseLog {
|
||||
_id: Types.ObjectId;
|
||||
type: string;
|
||||
date: Date;
|
||||
level: string;
|
||||
details: {
|
||||
channel: string;
|
||||
message: string;
|
||||
status: string;
|
||||
response?: Schema.Types.Mixed;
|
||||
};
|
||||
}
|
||||
|
||||
export interface IDatabaseLogMethods {
|
||||
toJSON(): Document<this>;
|
||||
}
|
||||
|
||||
export interface IDatabaseLogModel
|
||||
extends Model<IDatabaseLog, {}, IDatabaseLogMethods> {
|
||||
checkExistingField: (
|
||||
field: string,
|
||||
value: string
|
||||
) => Promise<HydratedDocument<IDatabaseLog, IDatabaseLogMethods>>;
|
||||
}
|
||||
|
||||
const DatabaseLogSchema = new Schema<
|
||||
IDatabaseLog,
|
||||
IDatabaseLogModel,
|
||||
IDatabaseLogMethods
|
||||
>(
|
||||
{
|
||||
type: {type: String, required: true},
|
||||
date: {type: Date, required: true},
|
||||
level: {type: String, required: true},
|
||||
details: {
|
||||
channel: {type: String, required: true},
|
||||
message: {type: String, required: true},
|
||||
status: {type: String, required: true},
|
||||
response: Schema.Types.Mixed,
|
||||
},
|
||||
},
|
||||
{
|
||||
toJSON: {
|
||||
virtuals: true,
|
||||
getters: true,
|
||||
},
|
||||
toObject: {
|
||||
virtuals: true,
|
||||
getters: true,
|
||||
},
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
DatabaseLogSchema.index({
|
||||
type: 1,
|
||||
date: 1,
|
||||
level: 1,
|
||||
'details.channel': 1,
|
||||
'details.message': 1,
|
||||
'details.status': 1,
|
||||
});
|
||||
|
||||
DatabaseLogSchema.methods.toJSON = function () {
|
||||
const logObj = this.toObject();
|
||||
logObj.id = logObj._id; // remap _id to id
|
||||
|
||||
delete logObj._id;
|
||||
delete logObj.__v;
|
||||
return logObj;
|
||||
};
|
||||
|
||||
DatabaseLogSchema.statics.checkExistingField = async function (
|
||||
field: string,
|
||||
value: string
|
||||
) {
|
||||
const log = await this.findOne({[field]: value});
|
||||
return log;
|
||||
};
|
||||
|
||||
const DatabaseLog = mongoose.model<IDatabaseLog, IDatabaseLogModel>(
|
||||
'DatabaseLog',
|
||||
DatabaseLogSchema,
|
||||
'logs'
|
||||
);
|
||||
|
||||
export default DatabaseLog;
|
||||
|
|
@ -0,0 +1,59 @@
|
|||
import express, {Response} from 'express';
|
||||
import _ from 'lodash';
|
||||
import {ICustomExpressRequest} from '../../middlewares/currentUser.middleware';
|
||||
|
||||
import appRouter from './app/app.route';
|
||||
import authRouter from './auth/auth.route';
|
||||
|
||||
import swaggerRouter from './swagger/swagger.route';
|
||||
import typedocRouter from './typedoc/typedoc.route';
|
||||
|
||||
import {
|
||||
apiV1RateLimiter,
|
||||
devlopmentApiLimiter,
|
||||
} from '../../middlewares/apiRateLimit.middleware';
|
||||
|
||||
const apiV1Router = express.Router();
|
||||
|
||||
apiV1Router.get('/', (req: ICustomExpressRequest, res: Response) => {
|
||||
res.status(200).json({
|
||||
status: 'success',
|
||||
message: 'Healthy check completed successfully',
|
||||
});
|
||||
});
|
||||
|
||||
const defaultRoutes = [
|
||||
{
|
||||
path: '/app',
|
||||
route: appRouter,
|
||||
},
|
||||
{
|
||||
path: '/auth',
|
||||
route: authRouter,
|
||||
},
|
||||
];
|
||||
|
||||
const devRoutes = [
|
||||
{
|
||||
path: '/documentation',
|
||||
route: swaggerRouter,
|
||||
},
|
||||
{
|
||||
path: '/typedoc', // this route will serve typedoc generated documentation
|
||||
route: typedocRouter,
|
||||
},
|
||||
];
|
||||
|
||||
_.forEach(defaultRoutes, route => {
|
||||
apiV1Router.use(apiV1RateLimiter);
|
||||
apiV1Router.use(route.path, route.route);
|
||||
});
|
||||
|
||||
if (process.env.NODE_ENV === 'development') {
|
||||
_.forEach(devRoutes, route => {
|
||||
apiV1Router.use(devlopmentApiLimiter);
|
||||
apiV1Router.use(route.path, route.route);
|
||||
});
|
||||
}
|
||||
|
||||
export default apiV1Router;
|
||||
|
|
@ -0,0 +1,136 @@
|
|||
{
|
||||
"openapi": "3.0.0",
|
||||
"info": {
|
||||
"title": "Express Typescript Rest Api",
|
||||
"description": "Express Typescript Rest Api",
|
||||
"termsOfService": "https://github.com/giuseppealbrizio/express-typescript-rest-api",
|
||||
"contact": {
|
||||
"email": "g.albrizio@gmail.com"
|
||||
},
|
||||
"license": {
|
||||
"name": "MIT",
|
||||
"url": "https://opensource.org/licenses/MIT"
|
||||
},
|
||||
"version": "1.0.0"
|
||||
},
|
||||
"externalDocs": {
|
||||
"description": "Find out more about this template",
|
||||
"url": "https://github.com/giuseppealbrizio"
|
||||
},
|
||||
"servers": [
|
||||
{
|
||||
"url": "http://localhost:3000/api/v1"
|
||||
},
|
||||
{
|
||||
"url": "http://localhost:3000/api/v1"
|
||||
}
|
||||
],
|
||||
"tags": [
|
||||
{
|
||||
"name": "App",
|
||||
"description": "App routes"
|
||||
}
|
||||
],
|
||||
"paths": {
|
||||
"/app": {
|
||||
"get": {
|
||||
"tags": ["App"],
|
||||
"summary": "App router",
|
||||
"operationId": "appTest",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "successful operation",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"status": {
|
||||
"type": "string"
|
||||
},
|
||||
"message": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Missing credentials",
|
||||
"content": {}
|
||||
},
|
||||
"401": {
|
||||
"description": "Invalid token, please log in or sign up",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"status": {
|
||||
"type": "string"
|
||||
},
|
||||
"error": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"message": {
|
||||
"type": "string"
|
||||
},
|
||||
"trace": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"statusCode": {
|
||||
"type": "number"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"bearerAuth": []
|
||||
}
|
||||
],
|
||||
"x-codegen-request-body-name": "body"
|
||||
}
|
||||
}
|
||||
},
|
||||
"components": {
|
||||
"schemas": {
|
||||
"App": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "integer",
|
||||
"format": "int64"
|
||||
},
|
||||
"username": {
|
||||
"type": "string"
|
||||
},
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"password": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"xml": {
|
||||
"name": "User"
|
||||
}
|
||||
}
|
||||
},
|
||||
"securitySchemes": {
|
||||
"bearerAuth": {
|
||||
"type": "http",
|
||||
"scheme": "bearer",
|
||||
"bearerFormat": "JWT"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
import express from 'express';
|
||||
import swaggerUi from 'swagger-ui-express';
|
||||
import swaggerDocument from './swagger.json';
|
||||
|
||||
const swaggerRouter = express.Router();
|
||||
|
||||
const options = {
|
||||
explorer: true,
|
||||
};
|
||||
|
||||
swaggerRouter.use('/', swaggerUi.serve);
|
||||
swaggerRouter.get('/', swaggerUi.setup(swaggerDocument, options));
|
||||
|
||||
export default swaggerRouter;
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
import express from 'express';
|
||||
import path from 'path';
|
||||
|
||||
const typedocRouter = express.Router();
|
||||
|
||||
typedocRouter.use(express.static(path.join(__dirname, '../../../../docs')));
|
||||
|
||||
typedocRouter.get('/typedoc', (req, res) => {
|
||||
res.sendFile(path.join(__dirname, '../../../../docs/index.html'));
|
||||
});
|
||||
|
||||
export default typedocRouter;
|
||||
|
|
@ -0,0 +1,340 @@
|
|||
import bcrypt from 'bcryptjs';
|
||||
import crypto from 'crypto';
|
||||
import dotenv from 'dotenv';
|
||||
import jwt from 'jsonwebtoken';
|
||||
import mongoose, {
|
||||
HydratedDocument,
|
||||
Document,
|
||||
Model,
|
||||
Schema,
|
||||
Types,
|
||||
} from 'mongoose';
|
||||
|
||||
import validator from 'validator';
|
||||
import {CustomError} from '../../../errors';
|
||||
import {apiRoles} from '../../config/roles.config';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
if (!process.env.JWT_KEY) {
|
||||
throw new CustomError(
|
||||
404,
|
||||
'Please provide a JWT_KEY as a global environment variable'
|
||||
);
|
||||
}
|
||||
|
||||
const jwtKey = process.env.JWT_KEY;
|
||||
|
||||
/**
|
||||
* Define the Google Passport interface
|
||||
*/
|
||||
|
||||
export interface IGooglePassport {
|
||||
id: string;
|
||||
sync: boolean;
|
||||
tokens: {
|
||||
accessToken: string;
|
||||
refreshToken: string;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* define user messages interface
|
||||
*/
|
||||
export interface IUserMessages {
|
||||
title: string;
|
||||
body: string;
|
||||
type: string;
|
||||
read: boolean;
|
||||
firebaseMessageId: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the User model...
|
||||
*/
|
||||
export interface IUser {
|
||||
// isModified(arg0: string): unknown;
|
||||
_id: Types.ObjectId;
|
||||
username: string;
|
||||
fullName: string;
|
||||
email: string;
|
||||
password: string;
|
||||
resetPasswordToken?: string;
|
||||
resetPasswordExpires?: Date;
|
||||
google: IGooglePassport;
|
||||
role: string;
|
||||
active: boolean;
|
||||
pictureUrl: string;
|
||||
pictureBlob: string;
|
||||
lastLoginDate: Date;
|
||||
notification: {
|
||||
fcmPermission: string;
|
||||
firebaseMessageToken: string;
|
||||
};
|
||||
messages: IUserMessages[];
|
||||
featureFlags?: {
|
||||
[key: string]: string;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Exporting methods for User
|
||||
*/
|
||||
export interface IUserMethods {
|
||||
toJSON(): Document<this>;
|
||||
comparePassword(password: string): Promise<boolean>;
|
||||
generateVerificationToken(): string;
|
||||
generatePasswordResetToken(): void;
|
||||
}
|
||||
|
||||
/**
|
||||
 * Create a new Model type that knows about Methods and statics and IUser...
|
||||
*/
|
||||
export interface IUserModel extends Model<IUser, {}, IUserMethods> {
|
||||
checkExistingField: (
|
||||
field: string,
|
||||
value: string
|
||||
) => Promise<HydratedDocument<IUser, IUserMethods>>;
|
||||
}
|
||||
|
||||
const MessageSchema = new Schema(
|
||||
{
|
||||
title: {
|
||||
type: String,
|
||||
required: true,
|
||||
trim: true,
|
||||
},
|
||||
body: {
|
||||
type: String,
|
||||
required: true,
|
||||
trim: true,
|
||||
},
|
||||
type: {
|
||||
type: String,
|
||||
required: true,
|
||||
trim: true,
|
||||
},
|
||||
read: {
|
||||
type: Boolean,
|
||||
default: false,
|
||||
},
|
||||
firebaseMessageId: {
|
||||
type: String,
|
||||
},
|
||||
},
|
||||
{
|
||||
toJSON: {
|
||||
virtuals: true,
|
||||
getters: true,
|
||||
},
|
||||
toObject: {
|
||||
virtuals: true,
|
||||
getters: true,
|
||||
},
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
const UserSchema = new Schema<IUser, IUserModel, IUserMethods>(
|
||||
{
|
||||
username: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true,
|
||||
lowercase: true,
|
||||
index: true,
|
||||
},
|
||||
fullName: {
|
||||
type: String,
|
||||
},
|
||||
email: {
|
||||
type: String,
|
||||
required: [true, "Email can't be blank"],
|
||||
unique: true,
|
||||
lowercase: true,
|
||||
index: true,
|
||||
// TODO: Re-enable the validation once migration is completed
|
||||
validate: [validator.isEmail, 'Please provide an email address'],
|
||||
match: [/\S+@\S+\.\S+/, 'is invalid'],
|
||||
trim: true,
|
||||
},
|
||||
password: {type: String, required: true, minlength: 8},
|
||||
resetPasswordToken: {
|
||||
type: String,
|
||||
required: false,
|
||||
},
|
||||
resetPasswordExpires: {
|
||||
type: Date,
|
||||
required: false,
|
||||
},
|
||||
google: {
|
||||
id: String,
|
||||
sync: {type: Boolean}, // authorisation to sync with google
|
||||
tokens: {
|
||||
accessToken: String,
|
||||
refreshToken: String,
|
||||
},
|
||||
},
|
||||
role: {
|
||||
type: String,
|
||||
enum: apiRoles,
|
||||
default: 'user',
|
||||
},
|
||||
active: {
|
||||
type: Boolean,
|
||||
default: true,
|
||||
},
|
||||
pictureUrl: {
|
||||
type: String,
|
||||
trim: true,
|
||||
validate: {
|
||||
validator: (value: string) =>
|
||||
validator.isURL(value, {
|
||||
protocols: ['http', 'https', 'ftp'],
|
||||
require_tld: true,
|
||||
require_protocol: true,
|
||||
}),
|
||||
message: 'Must be a Valid URL',
|
||||
},
|
||||
},
|
||||
pictureBlob: {
|
||||
type: String,
|
||||
},
|
||||
lastLoginDate: {type: Date, required: false, default: null},
|
||||
notification: {
|
||||
fcmPermission: {
|
||||
type: String,
|
||||
enum: ['granted', 'denied', 'default'],
|
||||
default: 'default',
|
||||
},
|
||||
firebaseMessageToken: {type: String, trim: true, default: null},
|
||||
},
|
||||
messages: [MessageSchema],
|
||||
featureFlags: {
|
||||
allowSendEmail: {
|
||||
type: String,
|
||||
enum: ['granted', 'denied', 'default'],
|
||||
default: 'granted',
|
||||
},
|
||||
allowSendSms: {
|
||||
type: String,
|
||||
enum: ['granted', 'denied', 'default'],
|
||||
default: 'granted',
|
||||
},
|
||||
betaFeatures: {
|
||||
type: String,
|
||||
enum: ['granted', 'denied', 'default'],
|
||||
default: 'default',
|
||||
},
|
||||
darkMode: {
|
||||
type: String,
|
||||
enum: ['granted', 'denied', 'default'],
|
||||
default: 'default',
|
||||
},
|
||||
personalization: {
|
||||
type: String,
|
||||
enum: ['granted', 'denied', 'default'],
|
||||
default: 'default',
|
||||
},
|
||||
geolocationBased: {
|
||||
type: String,
|
||||
enum: ['granted', 'denied', 'default'],
|
||||
default: 'default',
|
||||
},
|
||||
security: {
|
||||
type: String,
|
||||
enum: ['granted', 'denied', 'default'],
|
||||
default: 'default',
|
||||
},
|
||||
payment: {
|
||||
type: String,
|
||||
enum: ['granted', 'denied', 'default'],
|
||||
default: 'default',
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
toJSON: {
|
||||
virtuals: true,
|
||||
getters: true,
|
||||
},
|
||||
toObject: {
|
||||
virtuals: true,
|
||||
getters: true,
|
||||
},
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
UserSchema.index({username: 1, email: 1, 'google.id': 1});
|
||||
|
||||
/**
|
||||
* MONGOOSE MIDDLEWARE
|
||||
*/
|
||||
UserSchema.pre<HydratedDocument<IUser, IUserMethods>>(
|
||||
'save',
|
||||
async function (next) {
|
||||
if (!this.isModified('password')) return next();
|
||||
|
||||
const salt = await bcrypt.genSalt(10);
|
||||
this.password = await bcrypt.hash(this.password, salt);
|
||||
next();
|
||||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* MONGOOSE METHODS
|
||||
*/
|
||||
UserSchema.methods.toJSON = function () {
|
||||
const userObj = this.toObject();
|
||||
userObj.id = userObj._id; // remap _id to id
|
||||
|
||||
delete userObj._id;
|
||||
delete userObj.password;
|
||||
delete userObj.__v;
|
||||
return userObj;
|
||||
};
|
||||
|
||||
UserSchema.methods.comparePassword = async function (password: string) {
|
||||
return bcrypt.compare(password, this.password);
|
||||
};
|
||||
|
||||
UserSchema.methods.generateVerificationToken = function () {
|
||||
return jwt.sign(
|
||||
{
|
||||
id: this._id,
|
||||
email: this.email,
|
||||
active: this.active,
|
||||
role: this.role,
|
||||
employeeId: this.employeeId,
|
||||
clientId: this.clientId,
|
||||
vendorId: this.vendorId,
|
||||
deleted: this.deleted,
|
||||
featureFlags: this.featureFlags,
|
||||
},
|
||||
jwtKey,
|
||||
{
|
||||
expiresIn: '1d',
|
||||
// algorithm: 'RS256',
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
UserSchema.methods.generatePasswordResetToken = async function () {
|
||||
this.resetPasswordToken = crypto.randomBytes(20).toString('hex');
|
||||
this.resetPasswordExpires = Date.now() + 3600000; // expires in an hour
|
||||
};
|
||||
|
||||
/**
|
||||
* MONGOOSE STATIC METHODS
|
||||
*/
|
||||
UserSchema.statics.checkExistingField = async function (
|
||||
field: string,
|
||||
value: string
|
||||
) {
|
||||
return this.findOne({[`${field}`]: value});
|
||||
};
|
||||
|
||||
const User = mongoose.model<IUser, IUserModel>('User', UserSchema, 'users');
|
||||
|
||||
export default User;
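// Usage sketch (illustrative, not part of the original file): a typical login flow
// built on the methods defined above could look like this.
//
//   const user = await User.findOne({email: 'jane@example.com'}).exec();
//   if (user && (await user.comparePassword(plainTextPassword))) {
//     const token = user.generateVerificationToken(); // JWT, expires in 1 day
//   }
//
// The static helper works the same way during sign-up:
//   const taken = await User.checkExistingField('username', 'jane');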
|
||||
|
|
@ -0,0 +1,41 @@
|
|||
import http from 'http';
|
||||
import app from '../index';
|
||||
import Logger from '../lib/logger';
|
||||
|
||||
const port = process.env.PORT || 3000;
|
||||
|
||||
app.set('port', port);
|
||||
|
||||
const server = http.createServer(app);
|
||||
|
||||
const onError = (error: NodeJS.ErrnoException): void => {
|
||||
if (error.syscall !== 'listen') {
|
||||
throw error;
|
||||
}
|
||||
|
||||
const bind = typeof port === 'string' ? `Pipe ${port}` : `Port ${port}`;
|
||||
|
||||
// handle specific listen errors with friendly messages
|
||||
switch (error.code) {
|
||||
case 'EACCES':
|
||||
console.error(`${bind} requires elevated privileges`);
|
||||
process.exit(1);
|
||||
case 'EADDRINUSE':
|
||||
console.error(`${bind} is already in use`);
|
||||
process.exit(1);
|
||||
default:
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const onListening = (): void => {
|
||||
const addr = server.address();
|
||||
const bind = typeof addr === 'string' ? `pipe ${addr}` : `port ${addr?.port}`;
|
||||
Logger.debug(`Listening on ${bind}`);
|
||||
|
||||
Logger.info(`🚀 Server listening on port ${bind}`);
|
||||
};
|
||||
|
||||
server.listen(port);
|
||||
server.on('error', onError);
|
||||
server.on('listening', onListening);
|
||||
|
|
@ -0,0 +1,119 @@
|
|||
This folder contains all the config files needed to link the project to Google Cloud Platform.
|
||||
|
||||
## **Basic Concepts**
|
||||
|
||||
### 1. SERVICE ACCOUNT
|
||||
|
||||
Used to manage things like uploading files to Cloud Storage.
|
||||
|
||||
- Go to GCP and create a new project
|
||||
- Go to Service Accounts and create a service account with a descriptive name
|
||||
- Assign the role you want to the service account (e.g. "Storage Admin")
|
||||
- Create a JSON key, download it, and place it in `./src/config/gcloud`, renaming it to `google-application-credentials.json`
|
||||
- In the `.env` file, specify the path to this file as shown below
|
||||
- This links the app to the service account you created
|
||||
|
||||
```dotenv
|
||||
GOOGLE_APPLICATION_CREDENTIALS='./../config/gcloud/google-application-credentials.json'
|
||||
```
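Once this variable is set, the Google Cloud client libraries pick up the credentials automatically. A minimal sketch (the bucket variable is an assumption; any other client library behaves the same way):

```typescript
import {Storage} from '@google-cloud/storage';

// The client reads GOOGLE_APPLICATION_CREDENTIALS from the environment,
// so no key file has to be passed in explicitly.
const storage = new Storage();
const bucket = storage.bucket(process.env.GOOGLE_STORAGE_BUCKET_NAME as string);
```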
|
||||
|
||||
### 2. OAuth Client ID Account
|
||||
|
||||
e.g. used to create Passport strategies with Google
|
||||
|
||||
- Go to GCP
|
||||
- Go to API & Services and create one OAuth Client ID account
|
||||
- Choose Application Type -> Web application
|
||||
- Name the web client (e.g. Dev Test - Web Client OAuth 2.0 Account)
|
||||
- Under Authorized JavaScript origins and Authorized redirect URIs, add:
|
||||
|
||||
```
|
||||
Authorized JavaScript origins
|
||||
|
||||
URIs*
|
||||
|
||||
http://localhost:3000
|
||||
|
||||
Authorized redirect URIs
|
||||
|
||||
URIs*
|
||||
|
||||
http://localhost:3000/auth/google/callback
|
||||
```
|
||||
|
||||
- Copy the Google client ID and the Google client secret and put them in the `.env` file like so:
|
||||
|
||||
```dotenv
|
||||
GOOGLE_CLIENT_ID='your-google-client-id'
|
||||
GOOGLE_CLIENT_SECRET='your-client-secret'
|
||||
```
|
||||
|
||||
- Download the JSON file and rename it to `google-web-client-secret.json`
|
||||
- Then, if you need to import the file in a middleware such as Passport, you can do something like this:
|
||||
|
||||
```javascript
|
||||
const express = require('express');
const {google} = require('googleapis');

const OAuth2Data = require('./google-web-client-secret.json');
|
||||
|
||||
const app = express();
|
||||
|
||||
const CLIENT_ID = OAuth2Data.web.client_id;
|
||||
const CLIENT_SECRET = OAuth2Data.web.client_secret;
|
||||
const REDIRECT_URL = OAuth2Data.web.redirect_uris[0];
|
||||
|
||||
const oAuth2Client = new google.auth.OAuth2(
|
||||
CLIENT_ID,
|
||||
CLIENT_SECRET,
|
||||
REDIRECT_URL,
|
||||
);
|
||||
let authed = false;
|
||||
|
||||
app.get('/', (req, res) => {
|
||||
if (!authed) {
|
||||
// Generate an OAuth URL and redirect there
|
||||
const url = oAuth2Client.generateAuthUrl({
|
||||
access_type: 'offline',
|
||||
scope: 'https://www.googleapis.com/auth/gmail.readonly',
|
||||
});
|
||||
console.log(url);
|
||||
res.redirect(url);
|
||||
} else {
|
||||
const gmail = google.gmail({ version: 'v1', auth: oAuth2Client });
|
||||
gmail.users.labels.list(
|
||||
{
|
||||
userId: 'me',
|
||||
},
|
||||
(err, res) => {
|
||||
if (err) return console.log('The API returned an error: ' + err);
|
||||
const labels = res.data.labels;
|
||||
if (labels.length) {
|
||||
console.log('Labels:');
|
||||
labels.forEach((label) => {
|
||||
console.log(`- ${label.name}`);
|
||||
});
|
||||
} else {
|
||||
console.log('No labels found.');
|
||||
}
|
||||
},
|
||||
);
|
||||
res.send('Logged in');
|
||||
}
|
||||
});
|
||||
|
||||
app.get('/auth/google/callback', function (req, res) {
|
||||
const code = req.query.code;
|
||||
if (code) {
|
||||
// Get an access token based on our OAuth code
|
||||
oAuth2Client.getToken(code, function (err, tokens) {
|
||||
if (err) {
|
||||
console.log('Error authenticating');
|
||||
console.log(err);
|
||||
} else {
|
||||
console.log('Successfully authenticated');
|
||||
oAuth2Client.setCredentials(tokens);
|
||||
authed = true;
|
||||
res.redirect('/');
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
```
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
{
|
||||
"type": "service_account",
|
||||
"project_id": "your-project-id",
|
||||
"private_key_id": "your-private-key-id",
|
||||
"private_key": "your-private-key",
|
||||
"client_email": "service-account-email",
|
||||
"client_id": "your-client-id",
|
||||
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
||||
"token_uri": "https://oauth2.googleapis.com/token",
|
||||
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
||||
"client_x509_cert_url": "client_x509_cert_url"
|
||||
}
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
{
|
||||
"type": "service_account",
|
||||
"project_id": "diane-456005",
|
||||
"private_key_id": "8daa020ca93921e2c45470db1bf511939de4beef",
|
||||
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDFBoJ4S8/D6s00\nAfyHbY3dZpCQqDOcc7x6Bd9gx5y0FPUp6ZIbOHAhPqokp2EK3g9vYPkPQDjexC/2\nbrFU/dpZQpItFcbqCd9/lPJsNq+fgNSGCIdG8rN3pEol98RmkVnAKjmV1o3TpJc0\nvpP5i4OHd0dyx/c6gfNeyTbaWwzIjKCrcGFovBrfOozQzm8oOwcZxc5sFpYv8T4Y\nEqfueZSNAOgImJDN7JaJB9tpg5nfFJwEuDsERKd6oQIQvp9NJ3Kw2MqOj6r4RlL3\nB7Euk7bJPz/HfBWEhyymQADd27YHKHPD1Fw2eOG5CXYp0SFQitnpDZLgUI5OIKYs\nKgcCVq93AgMBAAECggEAHd4I7OaFMOxzvg9nsjn72FUXfi5ZYJK06YSMDSDwEuaI\nbG7xd088xEq6bS2zICbIjThUmSjBq5dvCTr/ho79niwMJRSDinADtv9Ofp64TIQx\nnhNF5Tiegc5gb9VZqw4cCRhS5sbXH38jGDD5v/RpHyHdrWXohwXdwv/atmA4wF/j\nlMGOcX0FEORQbZ9a1q3HsQXJVBrdmrFsmd8lCdxXBNPmO2iXANbw8SAVmvuQVArk\n2X9Q5wJPc3+bSZDVF7GZXaoPnrYtwSMGUH2gNtg3QLWmUxpkMtmG/rRhU0cSwTfl\nZmeIsl/raoRUBk2Vd+NfBZr/w/6XtT6Ze1/hXxbV6QKBgQDsTuaIOiOKFwCtnDez\nW2Xo3WZglojJ3/sdvhW16bDkZ1eWiGXva7GO00WKta4cPyP3KoxXTnZxOLAcHfco\nZiphJkjB0lQflMhz5u9De+JU95WrKAIl0s1/Sv6BIzBfZcIL1XXtzw0tq761bRtS\nNC9uItgREr4gX877HA20hHvfiQKBgQDVcZmm34/5j6JbW13Ul6CUphhGVs0AXK50\nM4gdrqqYIIkjEVB9Ni+hD9Ad2iAA58D3rWvSgYnURpPnLS3XM10wnoJGeygXMEvo\nmdSvBCN1XsQJ/l2Lj/5aSaAOz2F3GWA5EZdNgUe1/8DVU54CeKP1wxcueD/dIMTb\nnVo6RUNW/wKBgHGKY4f271aNQN0p3zWFZ8zgfC1Shv0Aaoba61GRrFXCNbp1ZQ0J\noLGwX4yLSNH3oI9E2VOltpEmHLAV0ciOdjRhkbnXFmZqNXpC7pltL82FfFtViNql\nk+linjBsOPTNTtQix1vxDTLxf1tqxiLUQinYAhsJ92JUxn2u+ALRWTeJAoGAYati\n+RZSBouwaoeLjy13IK5Ea2Nq2WCPv8KY5aQ4kfZJao+QuksiTlwzCoX2oRNrnKpx\nrVjzXfyRz3ZABLqPSSEvUdsnRD0obx59UTzekOW1ZTFNUwCoDl6kbEJ/QgWNn2+q\nQaAH1YNblQJ3SoAz3tDP+cayypglHK2LTSDGqLcCgYAij/uPfaTFE3fSvmBuR7Q2\nd1MrirtqsUPfqjRxo3RfROIKyoA2VRmO5i3CYdOLTdiWPNxt0hB1GqHvjlrzckvv\nBxhFC9x8Qxvbr7JQV36fTu8b9oUB5HhRZLTNQZ5T8jXGN4D1UmcczNUqOSnRVJq9\nt+vHGFvscoac5+R980mV2g==\n-----END PRIVATE KEY-----\n",
|
||||
"client_email": "diane-serviceaccount@diane-456005.iam.gserviceaccount.com",
|
||||
"client_id": "114388034961450557627",
|
||||
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
||||
"token_uri": "https://oauth2.googleapis.com/token",
|
||||
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
||||
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/diane-serviceaccount%40diane-456005.iam.gserviceaccount.com",
|
||||
"universe_domain": "googleapis.com"
|
||||
}
|
||||
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
{
|
||||
"web": {
|
||||
"client_id": "your-google-client-id",
|
||||
"project_id": "your-google-project-id",
|
||||
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
||||
"token_uri": "https://oauth2.googleapis.com/token",
|
||||
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
||||
"client_secret": "your-google-client-secret",
|
||||
"redirect_uris": ["http://localhost:3000/auth/google/callback"],
|
||||
"javascript_origins": ["http://localhost:3000"]
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
import mongoose from 'mongoose';
|
||||
import Logger from '../lib/logger';
|
||||
import {CustomError} from '../errors/CustomError.error';
|
||||
|
||||
mongoose.set('strictQuery', true);
|
||||
|
||||
mongoose.connection.on('connected', () => {
|
||||
Logger.info('MongoDB connection established');
|
||||
});
|
||||
|
||||
mongoose.connection.on('reconnected', () => {
|
||||
Logger.warn('MongoDB reconnected');
|
||||
});
|
||||
|
||||
mongoose.connection.on('disconnected', () => {
|
||||
Logger.warn('MongoDB disconnected');
|
||||
});
|
||||
|
||||
mongoose.connection.on('close', () => {
|
||||
Logger.warn('MongoDB connection closed');
|
||||
});
|
||||
|
||||
mongoose.connection.on('error', (error: string) => {
|
||||
Logger.error(`🤦🏻 MongoDB ERROR: ${error}`);
|
||||
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
export default {
|
||||
mongoDbProdConnection: async () => {
|
||||
try {
|
||||
await mongoose.connect(<string>process.env.MONGO_URI);
|
||||
Logger.info(`Connected to db: ${mongoose.connection.name}`);
|
||||
} catch (error) {
|
||||
Logger.error(`Production - MongoDB connection error. ${error}`);
|
||||
if (error instanceof CustomError) {
|
||||
throw new CustomError(error.statusCode, error.message);
|
||||
}
|
||||
}
|
||||
},
|
||||
mongoDBTestConnection: async () => {
|
||||
try {
|
||||
await mongoose.connect(<string>process.env.MONGO_URI_TEST);
|
||||
Logger.info(`Connected to db: ${mongoose.connection.name}`);
|
||||
} catch (error) {
|
||||
Logger.error('Test - MongoDB connection error: ' + error);
|
||||
if (error instanceof CustomError) {
|
||||
throw new CustomError(500, error.message);
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
import {createPool, Pool} from 'mysql2';
|
||||
|
||||
/**
|
||||
* If you would like to run the inserts asynchronously, you will want createPool.
|
||||
* With createConnection there is only one connection, and all queries
|
||||
* executed on that connection are queued, so it is not really asynchronous.
|
||||
* (Async from node.js perspective, but the queries are executed sequentially)
|
||||
* @type {Pool}
|
||||
*/
|
||||
const mySqlTestConnection: Pool = createPool({
|
||||
host: process.env.MYSQL_HOST_STAGE,
|
||||
user: process.env.MYSQL_USER_STAGE,
|
||||
password: process.env.MYSQL_PASSWORD_STAGE,
|
||||
database: process.env.MYSQL_DB_STAGE,
|
||||
waitForConnections: true,
|
||||
connectionLimit: 10,
|
||||
queueLimit: 0,
|
||||
// socketPath:
|
||||
// process.env.NODE_ENV !== 'production' ? '' : process.env.MYSQL_SOCKET_STAGE,
|
||||
});
|
||||
|
||||
const mySqlProdConnection: Pool = createPool({
|
||||
host: process.env.MYSQL_HOST_PROD,
|
||||
user: process.env.MYSQL_USER_PROD,
|
||||
password: process.env.MYSQL_PASSWORD_PROD,
|
||||
database: process.env.MYSQL_DB_PROD,
|
||||
waitForConnections: true,
|
||||
connectionLimit: 10,
|
||||
queueLimit: 0,
|
||||
// socketPath:
|
||||
// process.env.NODE_ENV !== 'production' ? '' : process.env.MYSQL_SOCKET_PROD,
|
||||
});
|
||||
|
||||
// TODO: When ready uncomment this and use the prod db
|
||||
export const mySqlConnection =
|
||||
process.env.NODE_ENV !== 'production'
|
||||
? mySqlTestConnection.promise()
|
||||
: mySqlProdConnection.promise();
|
||||
|
||||
/**
|
||||
* Example of query on pre-existing database
|
||||
*/
|
||||
|
||||
// const query = 'SELECT * FROM users LIMIT ?';
|
||||
// const [rows] = await mySqlConnection.execute(query, [limit]);
|
||||
|
|
@ -0,0 +1,236 @@
|
|||
import dotenv from 'dotenv';
|
||||
import passport from 'passport';
|
||||
import passportLocal, {IStrategyOptionsWithRequest} from 'passport-local';
|
||||
import passportGoogle from 'passport-google-oauth20';
|
||||
|
||||
import User, {IUser} from '../api/v1/user/user.model';
|
||||
import Logger from '../lib/logger';
|
||||
import {ICustomExpressRequest} from '../middlewares/currentUser.middleware';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const {GOOGLE_CLIENT_ID, GOOGLE_CLIENT_SECRET} = process.env;
|
||||
|
||||
const LocalStrategy = passportLocal.Strategy;
|
||||
const GoogleStrategy = passportGoogle.Strategy;
|
||||
|
||||
passport.serializeUser((user, done) => {
|
||||
/* Store only the id in passport req.session.passport.user */
|
||||
done(null, user);
|
||||
});
|
||||
|
||||
passport.deserializeUser((id, done) => {
|
||||
User.findOne({_id: id}, (err: NativeError, user: IUser) => {
|
||||
done(null, user);
|
||||
});
|
||||
});
|
||||
|
||||
const authFields: IStrategyOptionsWithRequest = {
|
||||
usernameField: 'email',
|
||||
passwordField: 'password',
|
||||
passReqToCallback: true,
|
||||
};
|
||||
|
||||
/**
|
||||
* Login strategy
|
||||
*/
|
||||
passport.use(
|
||||
'login',
|
||||
new LocalStrategy(
|
||||
authFields,
|
||||
async (req: ICustomExpressRequest, email, password, cb) => {
|
||||
try {
|
||||
const user = await User.findOne({
|
||||
$or: [{email}, {username: email.toLowerCase()}],
|
||||
}).exec();
|
||||
|
||||
if (!user || !user.password) {
|
||||
return cb(null, false, {message: 'User not found.'});
|
||||
}
|
||||
|
||||
const checkPassword = await user.comparePassword(password);
|
||||
|
||||
if (!checkPassword) {
|
||||
return cb(null, false, {message: 'Incorrect email or password.'});
|
||||
}
|
||||
|
||||
if (!user || !user.active) {
|
||||
return cb(null, false, {message: 'Account is deactivated.'});
|
||||
}
|
||||
|
||||
const {active} = user;
|
||||
|
||||
if (!active) {
|
||||
return cb(null, false, {message: 'Account is deactivated.'});
|
||||
}
|
||||
|
||||
user.lastLoginDate = new Date();
|
||||
await user.save();
|
||||
|
||||
return cb(null, user, {message: 'Logged In Successfully'});
|
||||
} catch (err: unknown) {
|
||||
if (err instanceof Error) {
|
||||
Logger.debug(err);
|
||||
return cb(null, false, {message: err.message});
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
/**
|
||||
* Sign up strategy
|
||||
*/
|
||||
passport.use(
|
||||
'signup',
|
||||
new LocalStrategy(authFields, async (req, email, password, cb) => {
|
||||
try {
|
||||
const checkEmail = await User.checkExistingField('email', email);
|
||||
|
||||
if (checkEmail) {
|
||||
return cb(null, false, {
|
||||
message: 'Email already registered, log in instead',
|
||||
});
|
||||
}
|
||||
|
||||
const checkUserName = await User.checkExistingField(
|
||||
'username',
|
||||
req.body.username
|
||||
);
|
||||
|
||||
if (checkUserName) {
|
||||
return cb(null, false, {
|
||||
message: 'Username exists, please try another',
|
||||
});
|
||||
}
|
||||
|
||||
const newUser = new User();
|
||||
newUser.email = req.body.email;
|
||||
newUser.password = req.body.password;
|
||||
newUser.username = req.body.username;
|
||||
|
||||
await newUser.save();
|
||||
|
||||
return cb(null, newUser);
|
||||
} catch (err: unknown) {
|
||||
if (err instanceof Error) {
|
||||
Logger.debug(err);
|
||||
return cb(null, false, {message: err.message});
|
||||
}
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
/**
|
||||
* The password reset strategy uses a previously generated token
|
||||
*/
|
||||
passport.use(
|
||||
'reset-password',
|
||||
new LocalStrategy(authFields, async (req, email, password, cb) => {
|
||||
try {
|
||||
const {token} = req.body;
|
||||
|
||||
const user = await User.findOne({
|
||||
resetPasswordToken: token,
|
||||
resetPasswordExpires: {$gt: Date.now()},
|
||||
}).exec();
|
||||
|
||||
if (!user) {
|
||||
return cb(null, false, {
|
||||
message: 'Password reset token is invalid or has expired.',
|
||||
});
|
||||
}
|
||||
|
||||
user.password = password;
|
||||
user.resetPasswordToken = undefined;
|
||||
user.resetPasswordExpires = undefined;
|
||||
|
||||
await user.save();
|
||||
|
||||
return cb(null, user, {message: 'Password Changed Successfully'});
|
||||
} catch (err: unknown) {
|
||||
if (err instanceof Error) {
|
||||
Logger.debug(err);
|
||||
return cb(null, false, {message: err.message});
|
||||
}
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
/**
|
||||
* Google strategy
|
||||
*/
|
||||
passport.use(
|
||||
'google',
|
||||
new GoogleStrategy(
|
||||
{
|
||||
clientID: <string>GOOGLE_CLIENT_ID,
|
||||
clientSecret: <string>GOOGLE_CLIENT_SECRET,
|
||||
callbackURL: `/api/v1/${process.env.SERVICE_NAME}/auth/google/callback`,
|
||||
},
|
||||
async (accessToken, refreshToken, profile, done) => {
|
||||
try {
|
||||
const username = profile.emails && profile?.emails[0]?.value;
|
||||
const email = profile.emails && profile?.emails[0]?.value;
|
||||
const pictureUrl = profile.photos && profile.photos[0].value;
|
||||
|
||||
// 1. Check if the user already has a Google profile and, if so, return it
|
||||
const googleUser = await User.findOne({
|
||||
'google.id': profile.id,
|
||||
}).exec();
|
||||
|
||||
if (googleUser) {
|
||||
return done(null, googleUser, {statusCode: 200});
|
||||
}
|
||||
|
||||
// 2. If the user's email is already in the db and they try to authenticate with Google,
|
||||
// update the record only with the Google id and tokens
|
||||
const checkEmail = await User.checkExistingField(
|
||||
'email',
|
||||
<string>email
|
||||
);
|
||||
|
||||
const fieldsToUpdate = {
|
||||
pictureUrl,
|
||||
'google.id': profile.id,
|
||||
'google.sync': true,
|
||||
'google.tokens.accessToken': accessToken,
|
||||
'google.tokens.refreshToken': refreshToken,
|
||||
};
|
||||
|
||||
if (checkEmail) {
|
||||
const user = await User.findByIdAndUpdate(
|
||||
checkEmail._id,
|
||||
fieldsToUpdate,
|
||||
{new: true}
|
||||
).exec();
|
||||
|
||||
return done(null, <IUser>user, {statusCode: 200});
|
||||
}
|
||||
|
||||
// 3. If none of the above matched, create a new User
|
||||
const userObj = new User({
|
||||
username, // the same as the email
|
||||
email,
|
||||
pictureUrl,
|
||||
password: accessToken,
|
||||
'google.id': profile.id,
|
||||
'google.sync': true,
|
||||
'google.tokens.accessToken': accessToken,
|
||||
'google.tokens.refreshToken': refreshToken,
|
||||
});
|
||||
|
||||
const user = await userObj.save({validateBeforeSave: false});
|
||||
|
||||
return done(null, user, {statusCode: 201});
|
||||
} catch (err: unknown) {
|
||||
if (err instanceof Error) {
|
||||
Logger.debug(err);
|
||||
return done(err, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
export default passport;
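// Usage sketch (illustrative; the route path and router name are assumptions):
//   router.post('/auth/login', passport.authenticate('login'), loginController);
// The 'signup', 'reset-password' and 'google' strategies are invoked the same way
// from the auth router, as noted in index.ts.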
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
export class CustomError extends Error {
|
||||
public statusCode: number;
|
||||
public message: string;
|
||||
|
||||
constructor(statusCode: number, message: string) {
|
||||
super(message);
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.name = this.constructor.name;
|
||||
this.statusCode = statusCode;
|
||||
this.message = message;
|
||||
}
|
||||
}
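// Usage sketch (illustrative): controllers can throw a CustomError with any HTTP
// status code, and the global error handler middleware serializes it as JSON:
//   throw new CustomError(422, 'Validation failed');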
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
import {CustomError} from './CustomError.error';
|
||||
|
||||
export class NotAuthorizedError extends CustomError {
|
||||
constructor(message: string) {
|
||||
super(401, message);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
import {CustomError} from './CustomError.error';
|
||||
|
||||
export class NotFoundError extends CustomError {
|
||||
constructor(message: string) {
|
||||
super(404, message);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
export * from './CustomError.error';
|
||||
export * from './NotAuthorized.error';
|
||||
export * from './NotFound.error';
|
||||
|
|
@ -0,0 +1,193 @@
|
|||
import dotenv from 'dotenv';
|
||||
import express from 'express';
|
||||
import compression from 'compression';
|
||||
import helmet from 'helmet';
|
||||
import xss from 'xss-clean';
|
||||
import cors from 'cors';
|
||||
import cookieParser from 'cookie-parser';
|
||||
import mongoSanitize from 'express-mongo-sanitize';
|
||||
import session from 'express-session';
|
||||
import MongoStore from 'connect-mongo';
|
||||
import passport from 'passport';
|
||||
import {initializeApp, applicationDefault} from 'firebase-admin/app';
|
||||
import mongoose from 'mongoose';
|
||||
import Logger from './lib/logger';
|
||||
import morganMiddleware from './middlewares/morgan.middleware';
|
||||
import {currentUserMiddleware} from './middlewares/currentUser.middleware';
|
||||
import errorHandleMiddleware from './middlewares/errorHandler.middleware';
|
||||
import {NotFoundError} from './errors/NotFound.error';
|
||||
import apiV1Router from './api/v1/index.route';
|
||||
import mongoDbConfiguration from './config/mongodb.config';
|
||||
// import path from 'path';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const {mongoDBTestConnection, mongoDbProdConnection} = mongoDbConfiguration;
|
||||
|
||||
/**
|
||||
* Connect to MongoDB
|
||||
*/
|
||||
if (process.env.NODE_ENV === 'development') {
|
||||
mongoDBTestConnection().catch(error => {
|
||||
Logger.error(error.message);
|
||||
});
|
||||
} else {
|
||||
mongoDbProdConnection().catch(error => {
|
||||
Logger.error(error.message);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Import agenda jobs
|
||||
*/
|
||||
import './jobs/agenda';
|
||||
|
||||
/**
|
||||
* Initialize Firebase Admin SDK
|
||||
*/
|
||||
initializeApp({
|
||||
credential: applicationDefault(),
|
||||
});
|
||||
|
||||
/**
|
||||
* Initialize express app
|
||||
*/
|
||||
const app = express();
|
||||
|
||||
// trust proxy
|
||||
app.set('trust proxy', true);
|
||||
|
||||
// logger middleware
|
||||
app.use(morganMiddleware);
|
||||
|
||||
// set security HTTP headers
|
||||
app.use(
|
||||
helmet({
|
||||
contentSecurityPolicy: false,
|
||||
crossOriginEmbedderPolicy: false, // set to false to prevent a bug in newer browsers
|
||||
})
|
||||
);
|
||||
|
||||
// parse body request
|
||||
app.use(express.json());
|
||||
|
||||
// parse urlencoded request
|
||||
app.use(express.urlencoded({extended: true}));
|
||||
|
||||
// sanitize
|
||||
app.use(xss());
|
||||
app.use(mongoSanitize());
|
||||
|
||||
// use GZIP compression
|
||||
app.use(compression());
|
||||
|
||||
// parse cookie
|
||||
app.use(cookieParser());
|
||||
|
||||
// Cookie policy definition
|
||||
const COOKIE_MAX_AGE: string | number =
|
||||
process.env.COOKIE_MAX_AGE || 1000 * 60 * 60 * 24;
|
||||
const SECRET = <string>process.env.JWT_KEY;
|
||||
|
||||
/**
|
||||
* FIX:
|
||||
* We reuse the existing mongoose connection to avoid an error:
|
||||
* this is a workaround for Jest, which crashes when the mongoUrl option is used
|
||||
*/
|
||||
const mongoStore = MongoStore.create({
|
||||
client: mongoose.connection.getClient(),
|
||||
stringify: false,
|
||||
autoRemove: 'interval',
|
||||
autoRemoveInterval: 1,
|
||||
});
|
||||
|
||||
app.use(
|
||||
session({
|
||||
cookie: {
|
||||
// secure: DEFAULT_ENV === 'production',
|
||||
maxAge: <number>COOKIE_MAX_AGE,
|
||||
httpOnly: true,
|
||||
sameSite: 'lax',
|
||||
},
|
||||
secret: SECRET,
|
||||
resave: false,
|
||||
saveUninitialized: false,
|
||||
/* Store session in mongodb */
|
||||
store: mongoStore,
|
||||
unset: 'destroy',
|
||||
})
|
||||
);
|
||||
|
||||
/**
|
||||
* currentUser middleware. It will set the current user in the request
|
||||
*/
|
||||
app.use(currentUserMiddleware);
|
||||
|
||||
/**
|
||||
* Initialize Passport and pass the session to session storage of express
|
||||
* Strategies are called in the auth router
|
||||
* and in ./src/config/passport.config.ts
|
||||
*/
|
||||
app.use(passport.initialize());
|
||||
app.use(passport.session());
|
||||
|
||||
/**
|
||||
* CORS configuration
|
||||
*/
|
||||
app.use(
|
||||
cors({
|
||||
origin: process.env.CLIENT_URL || '*', // allow CORS
|
||||
methods: 'GET,HEAD,PUT,PATCH,POST,DELETE',
|
||||
credentials: true, // allow session cookie from browser to pass through
|
||||
})
|
||||
);
|
||||
|
||||
/**
|
||||
* Headers configuration
|
||||
*/
|
||||
app.use((req, res, next) => {
|
||||
res.header('Access-Control-Allow-Origin', process.env.CLIENT_URL); // Update to match the domain you will make the request from
|
||||
res.header(
|
||||
'Access-Control-Allow-Headers',
|
||||
'Origin, X-Requested-With, Content-Type, Accept'
|
||||
);
|
||||
next();
|
||||
});
|
||||
|
||||
/**
|
||||
* This middleware serves the public client build and redirects everything
|
||||
* to the client index.html. Rename the client build folder to "public", move it
|
||||
* inside the server folder, and also activate the catch-all middleware below.
|
||||
*/
|
||||
// app.use(
|
||||
// express.static(path.join(__dirname, '../public'), {
|
||||
// index: 'index.html',
|
||||
// })
|
||||
// );
|
||||
|
||||
/**
|
||||
* Routes definitions
|
||||
*/
|
||||
app.use(`/api/v1/${process.env.SERVICE_NAME}`, apiV1Router);
|
||||
|
||||
/**
|
||||
* Catch-all middleware. Activate it to serve every route from
|
||||
* the public directory, e.g. when a React build is present.
|
||||
*/
|
||||
// app.use((req, res) =>
|
||||
// res.sendFile(path.resolve(path.join(__dirname, '../public/index.html')))
|
||||
// );
|
||||
|
||||
/**
|
||||
* Catch-all middleware. Throws an error if the route is not found.
|
||||
*/
|
||||
app.all('*', () => {
|
||||
throw new NotFoundError('Route not found');
|
||||
});
|
||||
|
||||
/**
|
||||
* Global Error handler middleware
|
||||
*/
|
||||
app.use(errorHandleMiddleware);
|
||||
|
||||
export default app;
|
||||
|
|
@ -0,0 +1,54 @@
|
|||
import dotenv from 'dotenv';
|
||||
import {Agenda} from '@hokify/agenda';
|
||||
import Logger from '../lib/logger';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const {MONGO_URI, MONGO_URI_TEST} = process.env;
|
||||
|
||||
interface AgendaDBOptions {
|
||||
address: string;
|
||||
collection?: string;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
options?: any;
|
||||
ensureIndex?: boolean;
|
||||
}
|
||||
|
||||
const agenda = new Agenda({
|
||||
db: {
|
||||
address:
|
||||
<string>process.env.NODE_ENV === 'production'
|
||||
? <string>MONGO_URI
|
||||
: <string>MONGO_URI_TEST,
|
||||
collection: 'agendaJobs',
|
||||
options: {
|
||||
useNewUrlParser: true,
|
||||
useUnifiedTopology: true,
|
||||
},
|
||||
ensureIndex: true,
|
||||
// maxConcurrency: 20,
|
||||
} as AgendaDBOptions,
|
||||
});
|
||||
|
||||
/**
|
||||
* CRON JOB
|
||||
* @description Check if agenda is working
|
||||
*/
|
||||
agenda.define('check_agenda_status', async job => {
|
||||
Logger.info('Agenda is working!', job.attrs.data);
|
||||
});
|
||||
|
||||
(async function () {
|
||||
const dailyAgendaStatusCheck = agenda.create('check_agenda_status');
|
||||
|
||||
await agenda.start();
|
||||
|
||||
dailyAgendaStatusCheck.repeatEvery('0 8 * * 1-7', {
|
||||
skipImmediate: true,
|
||||
timezone: 'Europe/Rome',
|
||||
});
|
||||
|
||||
dailyAgendaStatusCheck.unique({jobId: 0});
|
||||
|
||||
await dailyAgendaStatusCheck.save();
|
||||
})();
|
||||
|
|
@ -0,0 +1,98 @@
|
|||
import winston from 'winston';
|
||||
|
||||
/**
|
||||
* Define your severity levels.
|
||||
* With them, you can create log files and
|
||||
* see or hide levels based on the running ENV.
|
||||
*/
|
||||
const levels = {
|
||||
error: 0,
|
||||
warn: 1,
|
||||
info: 2,
|
||||
http: 3,
|
||||
debug: 4,
|
||||
};
|
||||
|
||||
/**
|
||||
* This method sets the current severity based on
|
||||
* the current NODE_ENV: show all the log levels
|
||||
* if the server was run in development mode; otherwise,
|
||||
* if it was run in production, show only warn and error messages.
|
||||
*/
|
||||
const level = () => {
|
||||
const env = process.env.NODE_ENV || 'development';
|
||||
const isDevelopment = env === 'development';
|
||||
return isDevelopment ? 'debug' : 'warn';
|
||||
};
|
||||
|
||||
/**
|
||||
* Define different colors for each level.
|
||||
* Colors make the log message more visible,
|
||||
* adding the ability to focus or ignore messages.
|
||||
*/
|
||||
const colors = {
|
||||
error: 'red',
|
||||
warn: 'yellow',
|
||||
info: 'green',
|
||||
http: 'magenta',
|
||||
debug: 'white',
|
||||
};
|
||||
|
||||
/**
|
||||
* Tell winston that you want to link the colors
|
||||
* defined above to the severity levels.
|
||||
*/
|
||||
winston.addColors(colors);
|
||||
|
||||
// Choose the appearance of your logs by customizing the log format.
|
||||
const format = winston.format.combine(
|
||||
// Add the message timestamp with the preferred format
|
||||
winston.format.timestamp({format: 'YYYY-MM-DD HH:mm:ss:ms'}),
|
||||
/**
|
||||
* Tell Winston that the logs must be colored but
|
||||
* we bypass this global colorize formatting because it generates
|
||||
* wrong output characters in files. It is added per-transport instead.
|
||||
*/
|
||||
// winston.format.colorize({all: true}),
|
||||
// Define the format of the message showing the timestamp, the level and the message
|
||||
winston.format.printf(
|
||||
info => `${info.timestamp} ${info.level}: ${info.message}`
|
||||
)
|
||||
);
|
||||
|
||||
/**
|
||||
* Define which transports the logger must use to print out messages.
|
||||
* In this example, we are using three different transports
|
||||
*/
|
||||
const transports = [
|
||||
// Allow using the console to print the messages
|
||||
new winston.transports.Console({
|
||||
format: winston.format.combine(
|
||||
// Integration to format. Tell Winston that the console logs must be colored
|
||||
winston.format.colorize({all: true})
|
||||
),
|
||||
}),
|
||||
// Allow printing all error-level messages inside the error.log file
|
||||
new winston.transports.File({
|
||||
filename: 'logs/error.log',
|
||||
level: 'error',
|
||||
}),
|
||||
/**
|
||||
* Allow printing all log messages inside the all.log file
|
||||
* (including the errors that are also printed inside error.log)
|
||||
*/
|
||||
new winston.transports.File({filename: 'logs/all.log'}),
|
||||
];
|
||||
|
||||
/**
|
||||
* Create the logger instance that has to be exported
|
||||
* and used to log messages.
|
||||
*/
|
||||
const Logger = winston.createLogger({
|
||||
level: level(),
|
||||
levels,
|
||||
format,
|
||||
transports,
|
||||
});
|
||||
|
||||
export default Logger;
|
||||
|
|
@ -0,0 +1,79 @@
|
|||
import {Response} from 'express';
|
||||
import rateLimit from 'express-rate-limit';
|
||||
import {ICustomExpressRequest} from './currentUser.middleware';
|
||||
|
||||
/**
|
||||
* Rate limiter for api v1
|
||||
* @see https://www.npmjs.com/package/express-rate-limit
|
||||
* @description 200 requests per 1 minute for production
|
||||
*/
|
||||
const apiV1RateLimiter = rateLimit({
|
||||
windowMs: 1 * 60 * 1000, // 1 minute
|
||||
max: 200, // Limit each IP to 200 requests per `window` (here, per 1 minute)
|
||||
standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers
|
||||
legacyHeaders: false, // Disable the `X-RateLimit-*` headers
|
||||
message: async (req: ICustomExpressRequest, res: Response) => {
|
||||
return res.status(429).json({
|
||||
status: 'error',
|
||||
message: 'You have exceeded the 200 requests in 1 minute limit!',
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
/**
|
||||
* Rate limiter for development routes such as typedoc and swagger
|
||||
* @description 1000 requests per 1 hour for development
|
||||
*/
|
||||
const devlopmentApiLimiter = rateLimit({
|
||||
windowMs: 60 * 60 * 1000, // 1 hour
|
||||
max: 1000, // Limit each IP to 1000 requests per `window` (here, per 1 hour)
|
||||
standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers
|
||||
legacyHeaders: false, // Disable the `X-RateLimit-*` headers
|
||||
message: async (req: ICustomExpressRequest, res: Response) => {
|
||||
return res.status(429).json({
|
||||
status: 'error',
|
||||
message: 'Too many requests, please try again later.',
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
/**
|
||||
* Rate limiter for recover password
|
||||
*/
|
||||
const recoverPasswordApiLimiter = rateLimit({
|
||||
windowMs: 1 * 60 * 1000, // 1 minute
|
||||
max: 1, // Limit each IP to 1 request per `window` (here, per 1 minute)
|
||||
standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers
|
||||
legacyHeaders: false, // Disable the `X-RateLimit-*` headers
|
||||
message: async (req: ICustomExpressRequest, res: Response) => {
|
||||
return res.status(429).json({
|
||||
status: 'error',
|
||||
message:
|
||||
'Too many requests to recover password, please try again in 1 minute.',
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
/**
|
||||
* Rate limiter for reset password
|
||||
*/
|
||||
const resetPasswordApiLimiter = rateLimit({
|
||||
windowMs: 1 * 60 * 1000, // 1 minute
|
||||
max: 10, // Limit each IP to 10 requests per `window` (here, per 1 minute)
|
||||
standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers
|
||||
legacyHeaders: false, // Disable the `X-RateLimit-*` headers
|
||||
message: async (req: ICustomExpressRequest, res: Response) => {
|
||||
return res.status(429).json({
|
||||
status: 'error',
|
||||
message:
|
||||
'Too many requests to reset password, please try again in 1 minute.',
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
export {
|
||||
apiV1RateLimiter,
|
||||
devlopmentApiLimiter,
|
||||
recoverPasswordApiLimiter,
|
||||
resetPasswordApiLimiter,
|
||||
};
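// Usage sketch (illustrative; the import path and mount point are assumptions):
//   import {apiV1RateLimiter} from './middlewares/rateLimiter.middleware';
//   app.use(`/api/v1/${process.env.SERVICE_NAME}`, apiV1RateLimiter, apiV1Router);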
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
import {NextFunction, Response} from 'express';
|
||||
import {ICustomExpressRequest} from './currentUser.middleware';
|
||||
|
||||
/**
|
||||
* Wraps an async route handler and forwards any error it throws to next().
|
||||
*/
|
||||
export default (catchAsyncHandler: Function) =>
|
||||
async (
|
||||
request: ICustomExpressRequest,
|
||||
response: Response,
|
||||
next: NextFunction
|
||||
): Promise<void> => {
|
||||
try {
|
||||
await catchAsyncHandler(request, response, next);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
};
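// Usage sketch (illustrative; `catchAsync` is an assumed name for this default export):
//   router.get('/users', catchAsync(async (req, res) => {
//     const users = await User.find().exec();
//     res.json(users);
//   }));
// Any rejection inside the handler is forwarded to the global error handler.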
|
||||
|
|
@ -0,0 +1,69 @@
|
|||
/**
|
||||
* This middleware differs from the authentication one
|
||||
* because it is called after authentication to retrieve
|
||||
* the jwt token stored in the cookie. This makes it easy to
|
||||
* export in a shared library.
|
||||
*/
|
||||
import {NextFunction, Request, Response} from 'express';
|
||||
import jwt from 'jsonwebtoken';
|
||||
|
||||
export interface ICurrentUserPayload {
|
||||
id: string;
|
||||
email: string;
|
||||
active: boolean;
|
||||
role: string;
|
||||
employeeId: string;
|
||||
clientId: string;
|
||||
vendorId: string;
|
||||
deleted: boolean;
|
||||
featureFlags: {
|
||||
allowSendEmail: string;
|
||||
allowSendSms: string;
|
||||
betaFeatures: string;
|
||||
darkMode: string;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* An interface representing the custom Express request object.
|
||||
*/
|
||||
export interface ICustomExpressRequest extends Request {
|
||||
currentUser?: ICurrentUserPayload;
|
||||
}
|
||||
|
||||
// const secretOrPrivateKey = <string>process.env.JWT_KEY;
|
||||
|
||||
export const currentUserMiddleware = (
|
||||
req: ICustomExpressRequest,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
) => {
|
||||
if (!req.cookies?.jwt && !req.headers?.authorization) {
|
||||
return next();
|
||||
}
|
||||
try {
|
||||
if (
|
||||
req.headers.authorization &&
|
||||
req.headers.authorization.startsWith('Bearer ')
|
||||
) {
|
||||
const jwtFromBearer = req.headers?.authorization?.split(' ');
|
||||
|
||||
const jwtToken = jwtFromBearer[1];
|
||||
|
||||
req.currentUser = jwt.verify(
|
||||
jwtToken,
|
||||
// secretOrPrivateKey,
|
||||
<string>process.env.JWT_KEY
|
||||
) as ICurrentUserPayload;
|
||||
} else if (req.cookies.jwt) {
|
||||
req.currentUser = jwt.verify(
|
||||
req.cookies.jwt,
|
||||
// secretOrPrivateKey,
|
||||
<string>process.env.JWT_KEY
|
||||
) as ICurrentUserPayload;
|
||||
}
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
return next();
|
||||
};
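// Usage sketch (illustrative): downstream handlers can read the decoded payload, e.g.
//   if (req.currentUser?.role === 'admin') { /* ... */ }
// A missing token leaves req.currentUser undefined; an invalid one is passed to next(error).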
|
||||
|
|
@ -0,0 +1,43 @@
|
|||
/**
|
||||
* This middleware is responsible for returning a JSON response every time
|
||||
* an error comes in. We use it in index.ts as a global middleware.
|
||||
*/
|
||||
import dotenv from 'dotenv';
|
||||
import {NextFunction, Request, Response} from 'express';
|
||||
import {CustomError} from '../errors';
|
||||
import Logger from '../lib/logger';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const errorHandleMiddleware = (
|
||||
err: CustomError,
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
) => {
|
||||
const isProduction = process.env.NODE_ENV === 'production';
|
||||
let errorMessage = {};
|
||||
|
||||
if (res.headersSent) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
if (!isProduction) {
|
||||
Logger.debug(err.stack);
|
||||
errorMessage = err;
|
||||
}
|
||||
|
||||
if (err) {
|
||||
return res.status(err.statusCode || 500).json({
|
||||
status: 'error',
|
||||
statusCode: err.statusCode,
|
||||
message: err.message,
|
||||
error: {
|
||||
message: err.message,
|
||||
...(!isProduction && {trace: errorMessage}),
|
||||
},
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export default errorHandleMiddleware;
|
||||
|
|
@ -0,0 +1,44 @@
|
|||
import morgan, {StreamOptions} from 'morgan';
|
||||
|
||||
import Logger from '../lib/logger';
|
||||
|
||||
/**
|
||||
* Override the stream method by telling
|
||||
* Morgan to use our custom logger instead of the console.log.
|
||||
*/
|
||||
const stream: StreamOptions = {
|
||||
// Use the http severity
|
||||
write: message => Logger.http(message),
|
||||
};
|
||||
|
||||
/**
|
||||
*
|
||||
* Skip all the Morgan http log if the
|
||||
* application is not running in development mode.
|
||||
* This method is not really needed here since
|
||||
* we already told the logger that it should print
|
||||
* only warning and error messages in production.
|
||||
*/
|
||||
const skip = () => {
|
||||
const env = process.env.NODE_ENV || 'development';
|
||||
return env !== 'development';
|
||||
};
|
||||
|
||||
// Build the morgan middleware
|
||||
const morganMiddleware = morgan(
|
||||
/**
|
||||
* Define message format string (this is the default one).
|
||||
* The message format is made from tokens, and each token is
|
||||
* defined inside the Morgan library.
|
||||
* You can create your own custom token to show whatever you want from a request.
|
||||
*/
|
||||
':method :url :status :res[content-length] - :response-time ms - :remote-addr - :user-agent - :date[iso]',
|
||||
/**
|
||||
* Options: in this case, I overwrote the stream and the skip logic.
|
||||
* See the methods above.
|
||||
*/
|
||||
|
||||
{stream, skip}
|
||||
);
|
||||
|
||||
export default morganMiddleware;
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
import {ICustomExpressRequest} from './currentUser.middleware';
|
||||
import {Response, NextFunction} from 'express';
|
||||
import {NotAuthorizedError} from '../errors';
|
||||
|
||||
export const requireAdminRoleMiddleware = (
|
||||
req: ICustomExpressRequest,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
) => {
|
||||
if (req.currentUser?.role !== 'admin') {
|
||||
throw new NotAuthorizedError('You are not an admin!');
|
||||
}
|
||||
next();
|
||||
};
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
import {Response, NextFunction} from 'express';
|
||||
import {NotAuthorizedError} from '../errors';
|
||||
import {ICustomExpressRequest} from './currentUser.middleware';
|
||||
|
||||
export const requireAuthenticationMiddleware = (
|
||||
req: ICustomExpressRequest,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
) => {
|
||||
if (!req.currentUser) {
|
||||
throw new NotAuthorizedError('You are not authorized! Please login!');
|
||||
}
|
||||
next();
|
||||
};
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
import {NextFunction, Response} from 'express';
|
||||
|
||||
import {apiRolesRights} from '../api/config/roles.config';
|
||||
import {NotAuthorizedError} from '../errors';
|
||||
import {ICustomExpressRequest} from './currentUser.middleware';
|
||||
|
||||
export const verifyApiRights =
|
||||
(...requiredRights: Array<string>) =>
|
||||
(req: ICustomExpressRequest, res: Response, next: NextFunction) => {
|
||||
if (requiredRights?.length) {
|
||||
const userRights = <Array<string>>(
|
||||
apiRolesRights.get(<string>req.currentUser?.role)
|
||||
);
|
||||
|
||||
const hasRequiredRights = requiredRights.every((requiredRight: string) =>
|
||||
userRights?.includes(requiredRight)
|
||||
);
|
||||
|
||||
if (
|
||||
!hasRequiredRights &&
|
||||
req.params.userId !== <string>req.currentUser?.id
|
||||
) {
|
||||
throw new NotAuthorizedError(
|
||||
'You are not authorized to use this endpoint'
|
||||
);
|
||||
}
|
||||
}
|
||||
next();
|
||||
};
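// Usage sketch (illustrative; 'getUsers' is an assumed right defined in roles.config):
//   router.get('/users/:userId', verifyApiRights('getUsers'), getUserHandler);
// A request also passes when the :userId param matches the authenticated user's own id.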
|
||||
|
|
@ -0,0 +1,57 @@
|
|||
import SparkPost from 'sparkpost';
|
||||
import {CustomError} from '../../errors';
|
||||
import Logger from '../../lib/logger';
|
||||
|
||||
/**
|
||||
* Send reset password token to user email
|
||||
* @param email
|
||||
* @param token
|
||||
* @returns Promise<SparkPost.ResultsPromise<{ total_rejected_recipients: number; total_accepted_recipients: number; id: string; }>>
|
||||
*/
|
||||
const sendResetPasswordToken = async (
|
||||
email: string,
|
||||
token: string
|
||||
): Promise<
|
||||
SparkPost.ResultsPromise<{
|
||||
total_rejected_recipients: number;
|
||||
total_accepted_recipients: number;
|
||||
id: string;
|
||||
}>
|
||||
> => {
|
||||
const {SPARKPOST_API_KEY, SPARKPOST_SENDER_DOMAIN} = process.env;
|
||||
try {
|
||||
const euClient = new SparkPost(SPARKPOST_API_KEY, {
|
||||
origin: 'https://api.eu.sparkpost.com:443',
|
||||
});
|
||||
|
||||
const transmission = {
|
||||
recipients: [
|
||||
{
|
||||
address: {
|
||||
email,
|
||||
name: email,
|
||||
},
|
||||
},
|
||||
],
|
||||
content: {
|
||||
from: {
|
||||
email: `support@${SPARKPOST_SENDER_DOMAIN}`,
|
||||
name: 'Support Email',
|
||||
},
|
||||
subject: 'Reset your password',
|
||||
reply_to: `support@${SPARKPOST_SENDER_DOMAIN}`,
|
||||
text: `Hello ${email}, we heard you lost your password. You can recover with this token: ${token}`,
|
||||
},
|
||||
};
|
||||
return await euClient.transmissions.send(transmission);
|
||||
} catch (error) {
|
||||
Logger.error(error);
|
||||
if (error instanceof CustomError) {
|
||||
throw new CustomError(error.statusCode, error.message);
|
||||
}
|
||||
// here we are throwing an error instead of returning it
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export {sendResetPasswordToken};
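// Usage sketch (illustrative): typically called from a "forgot password" controller
// after user.generatePasswordResetToken() has filled in resetPasswordToken:
//   await sendResetPasswordToken(user.email, user.resetPasswordToken as string);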
|
||||
|
|
@ -0,0 +1,193 @@
|
|||
import {Message, PubSub, Subscription, Topic} from '@google-cloud/pubsub';
|
||||
import DatabaseLog, {
|
||||
IDatabaseLog,
|
||||
} from '../../api/v1/database-logs/databaseLog.model';
|
||||
import Logger from '../../lib/logger';
|
||||
import {HydratedDocument} from 'mongoose';
|
||||
|
||||
const pubSubClient = new PubSub();
|
||||
|
||||
/**
|
||||
* declare custom payload interface
|
||||
*/
|
||||
export interface IPubSubPayload<T> {
|
||||
[key: string]: T;
|
||||
}
|
||||
/**
|
||||
* declare custom error interface
|
||||
*/
|
||||
export interface IPubSubPublishError extends Error {
|
||||
statusCode: number;
|
||||
}
|
||||
|
||||
export type TPubSubMessage = Message;
|
||||
|
||||
/**
|
||||
* declare custom error class for PubSub publish error
|
||||
* We define a custom class since we want to throw a custom error with a custom status code
|
||||
*/
|
||||
class PubSubPublishError extends Error implements IPubSubPublishError {
|
||||
statusCode: number;
|
||||
|
||||
constructor(message: string, statusCode: number) {
|
||||
super(message);
|
||||
this.statusCode = statusCode;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Publish message to PubSub
|
||||
* @param payload
|
||||
* @param topicName
|
||||
* @returns
|
||||
*/
|
||||
const publishMessageToPubSubTopic = async <T>(
|
||||
payload: IPubSubPayload<T>,
|
||||
topicName: string
|
||||
): Promise<string> => {
|
||||
try {
|
||||
const dataBuffer = Buffer.from(JSON.stringify(payload));
|
||||
|
||||
const topic: Topic = pubSubClient.topic(topicName);
|
||||
|
||||
if (!(await topic.exists())) {
|
||||
throw new PubSubPublishError(`Topic ${topicName} does not exist`, 404);
|
||||
}
|
||||
|
||||
const message = {
|
||||
data: dataBuffer,
|
||||
};
|
||||
|
||||
const response = await topic.publishMessage(message);
|
||||
|
||||
return response;
|
||||
} catch (error) {
|
||||
Logger.error(error);
|
||||
if (error instanceof PubSubPublishError) {
|
||||
throw error;
|
||||
} else {
|
||||
throw new PubSubPublishError(
|
||||
`Failed to publish message to topic ${topicName} with error: ${error}`,
|
||||
404
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Listen for messages on a PubSub pull subscription for a limited time
|
||||
* @param subscriptionName
|
||||
* @param timeout - how long to listen for messages, in seconds
* @returns {Promise<string>}
|
||||
*/
|
||||
const listenForPubSubPullSubscription = async (
|
||||
subscriptionName: string,
|
||||
timeout: number
|
||||
): Promise<string> => {
|
||||
try {
|
||||
const subscriberOptions = {
|
||||
flowControl: {
|
||||
maxMessages: 10,
|
||||
},
|
||||
};
|
||||
|
||||
const subscription: Subscription = pubSubClient.subscription(
|
||||
subscriptionName,
|
||||
subscriberOptions
|
||||
);
|
||||
|
||||
const checkSubscriptionExists = await subscription.exists();
|
||||
|
||||
/**
|
||||
* Check if subscription exists
|
||||
*/
|
||||
if (!checkSubscriptionExists[0]) {
|
||||
throw new PubSubPublishError(
|
||||
`Subscription ${subscriptionName} does not exist`,
|
||||
404
|
||||
);
|
||||
}
|
||||
|
||||
// Instantiate the message counter
|
||||
let messageCount = 0;
|
||||
|
||||
/**
|
||||
* Create an event handler to handle messages
|
||||
* @param message
|
||||
*/
|
||||
const messageHandler = async (message: TPubSubMessage): Promise<void> => {
|
||||
const data = Buffer.from(message.data).toString('utf8');
|
||||
|
||||
const response = JSON.parse(data);
|
||||
|
||||
/**
|
||||
* Create a database log for the message retrieved from PubSub
|
||||
* This is just for testing purposes, to see whether the message is being received
|
||||
*/
|
||||
const databaseLog: HydratedDocument<IDatabaseLog> = new DatabaseLog({
|
||||
type: 'pubsub-message',
|
||||
date: new Date(),
|
||||
level: 'info',
|
||||
details: {
|
||||
channel: 'pubsub',
|
||||
message: 'Message retrieved from PubSub pull subscription',
|
||||
status: 'SUCCESS',
|
||||
response: {
|
||||
...response,
|
||||
messageId: message.id,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
await databaseLog.save();
|
||||
|
||||
Logger.debug(`Received message ${message.id}:`);
|
||||
Logger.debug(`\tData: ${message.data}`);
|
||||
Logger.debug(`\tAttributes: ${JSON.stringify(message.attributes)}`);
|
||||
messageCount += 1;
|
||||
|
||||
message.ack();
|
||||
};
|
||||
|
||||
subscription.on('message', messageHandler);
|
||||
|
||||
/**
|
||||
* Create an error handler to handle errors
|
||||
* @param error
|
||||
*/
|
||||
const errorHandler = (error: Error): void => {
|
||||
Logger.error(`Error: ${error}`);
|
||||
subscription.removeListener('message', messageHandler);
|
||||
};
|
||||
|
||||
subscription.on('error', errorHandler);
|
||||
|
||||
/**
|
||||
* Close the subscription after the given timeout (in seconds)
|
||||
*/
|
||||
setTimeout(() => {
|
||||
subscription.removeListener('message', messageHandler);
|
||||
subscription.removeListener('error', errorHandler);
|
||||
Logger.warn(
|
||||
`Subscription: ${subscriptionName} closed after ${timeout}s - ${messageCount} message(s) received.`
|
||||
);
|
||||
}, timeout * 1000);
|
||||
|
||||
return `Subscription ${subscriptionName} listening for messages`;
|
||||
} catch (error) {
|
||||
Logger.error(error);
|
||||
if (error instanceof PubSubPublishError) {
|
||||
throw error;
|
||||
} else {
|
||||
throw new PubSubPublishError(
|
||||
`Failed to pull message from topic ${subscriptionName} with error: ${error}`,
|
||||
404
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export {
|
||||
publishMessageToPubSubTopic,
|
||||
listenForPubSubPullSubscription,
|
||||
PubSubPublishError,
|
||||
};
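// Usage sketch (illustrative; topic and subscription names are placeholders):
//   await publishMessageToPubSubTopic({event: 'user.created'}, 'users-topic');
//   await listenForPubSubPullSubscription('users-topic-sub', 60); // listen for 60 seconds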
|
||||
|
|
@ -0,0 +1,89 @@
|
|||
import {
|
||||
Client,
|
||||
DirectionsResponseData,
|
||||
Language,
|
||||
ResponseData,
|
||||
TravelMode,
|
||||
} from '@googlemaps/google-maps-services-js';
|
||||
import Logger from '../../lib/logger';
|
||||
|
||||
export interface IGoogleMapsDirections {
|
||||
origin: string;
|
||||
destination: string;
|
||||
}
|
||||
|
||||
const getGoogleMapsDirections = async (
|
||||
origin: string,
|
||||
destination: string
|
||||
): Promise<ResponseData | DirectionsResponseData> => {
|
||||
try {
|
||||
const client = new Client();
|
||||
|
||||
const response = await client.directions({
|
||||
params: {
|
||||
origin,
|
||||
destination,
|
||||
mode: <TravelMode>'driving',
|
||||
language: <Language>'it',
|
||||
key: <string>process.env.GOOGLE_MAPS_API_KEY,
|
||||
},
|
||||
});
|
||||
|
||||
// If the Google Maps API returns OK, create an object to use with mongodb
|
||||
if (response.data.status === 'OK') {
|
||||
const direction = response.data.routes[0].legs[0];
|
||||
|
||||
const distanceObject = {
|
||||
status: response.data.status,
|
||||
error_message: response.data.error_message,
|
||||
distance: {
|
||||
text: direction.distance.text,
|
||||
value: direction.distance.value,
|
||||
},
|
||||
duration: {
|
||||
text: direction.duration.text,
|
||||
value: direction.duration.value,
|
||||
},
|
||||
start: {
|
||||
address: direction.start_address,
|
||||
location: {
|
||||
coordinates: [
|
||||
direction.start_location.lat,
|
||||
direction.start_location.lng,
|
||||
],
|
||||
},
|
||||
},
|
||||
end: {
|
||||
address: direction.end_address,
|
||||
location: {
|
||||
coordinates: [
|
||||
direction.end_location.lat,
|
||||
direction.end_location.lng,
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
return distanceObject;
|
||||
}
|
||||
return response.data;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
} catch (error: any | unknown) {
|
||||
/**
|
||||
* The Google Maps API returns errors in different forms.
|
||||
* If we threw here we would block the execution of the function,
|
||||
* so for now we just return an object containing the error
|
||||
* to store in the mongodb travel schema.
|
||||
* directions returns code: error.response.status
|
||||
* directions returns error: error.response.data.status
|
||||
* directions returns error message: error.response.data.error_message
|
||||
*/
|
||||
Logger.error(error);
|
||||
return {
|
||||
geocoded_waypoints: error.response.data.geocoded_waypoints,
|
||||
status: error.response.status,
|
||||
error_message: error.response.data.error_message,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
export {getGoogleMapsDirections};
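// Usage sketch (illustrative; the addresses are placeholders):
//   const directions = await getGoogleMapsDirections('Milano, IT', 'Roma, IT');
//   // On 'OK' the result contains distance, duration and start/end coordinates;
//   // otherwise it contains the status and error_message returned by the API.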
|
||||
|
|
@ -0,0 +1,108 @@
|
|||
import {getMessaging} from 'firebase-admin/messaging';
|
||||
import {CustomError} from '../../errors';
|
||||
|
||||
export interface IFirebaseMessage {
|
||||
title: string;
|
||||
body: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a single firebase message
|
||||
* @param message
|
||||
* @param userFirebaseToken
|
||||
* @returns
|
||||
*/
|
||||
const sendSingleFirebaseMessage = async (
|
||||
message: IFirebaseMessage,
|
||||
userFirebaseToken: string
|
||||
): Promise<object> => {
|
||||
const {title, body} = message;
|
||||
|
||||
const messageObject = {
|
||||
data: {
|
||||
title,
|
||||
body,
|
||||
},
|
||||
token: userFirebaseToken,
|
||||
};
|
||||
|
||||
try {
|
||||
const response = await getMessaging().send(messageObject);
|
||||
|
||||
return {message: 'Successfully sent message', response};
|
||||
} catch (error) {
|
||||
if (error instanceof CustomError) {
|
||||
throw new CustomError(error.statusCode, error.message);
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Send a multicast firebase message
|
||||
* @param message
|
||||
* @param usersFirebaseTokens
|
||||
* @returns
|
||||
*/
|
||||
const sendMulticastFirebaseMessage = async (
|
||||
message: IFirebaseMessage,
|
||||
usersFirebaseTokens: Array<string>
|
||||
): Promise<{
|
||||
status: string;
|
||||
message: string;
|
||||
response: object;
|
||||
failedTokens?: string[];
|
||||
}> => {
|
||||
const {title, body} = message;
|
||||
|
||||
const messageObject = {
|
||||
data: {
|
||||
title,
|
||||
body,
|
||||
},
|
||||
tokens: usersFirebaseTokens,
|
||||
};
|
||||
|
||||
try {
|
||||
const response = await getMessaging().sendMulticast(messageObject);
|
||||
|
||||
if (response.failureCount > 0 && response.successCount > 0) {
|
||||
const failedTokens: string[] = [];
|
||||
response.responses.forEach((resp, idx) => {
|
||||
if (!resp.success) {
|
||||
failedTokens.push(usersFirebaseTokens[idx]);
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
status: 'incomplete',
|
||||
message: 'Some messages were not sent to users',
|
||||
response,
|
||||
failedTokens,
|
||||
};
|
||||
} else if (response.successCount === 0) {
|
||||
return {
|
||||
status: 'error',
|
||||
message: 'Failed to send all messages to users',
|
||||
response,
|
||||
failedTokens: usersFirebaseTokens,
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
status: 'success',
|
||||
message: 'Successfully sent message to all users',
|
||||
response,
|
||||
failedTokens: [],
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof CustomError) {
|
||||
throw new CustomError(error.statusCode, error.message);
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export {sendSingleFirebaseMessage, sendMulticastFirebaseMessage};
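// Usage sketch (illustrative; the tokens are placeholders):
//   await sendSingleFirebaseMessage({title: 'Hello', body: 'Welcome!'}, userToken);
//   const result = await sendMulticastFirebaseMessage(
//     {title: 'Hello', body: 'Welcome!'},
//     [tokenA, tokenB]
//   );
//   // result.failedTokens lists the recipients that did not receive the message.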
|
||||
|
|
@ -0,0 +1,80 @@
|
|||
import {initClient, ConversationParameter} from 'messagebird';
|
||||
import Logger from '../../lib/logger';
|
||||
import DatabaseLog, {
|
||||
IDatabaseLog,
|
||||
} from '../../api/v1/database-logs/databaseLog.model';
|
||||
import {HydratedDocument} from 'mongoose';
|
||||
|
||||
const sendWhatsappMessageWithMessagebird = (toNumber: string): void => {
|
||||
const {
|
||||
MESSAGEBIRD_ACCESS_KEY,
|
||||
MESSAGEBIRD_WHATSAPP_CHANNEL_ID,
|
||||
MESSAGEBIRD_TEMPLATE_NAMESPACE_ID,
|
||||
MESSAGEBIRD_TEMPLATE_NAME_TEST,
|
||||
} = process.env;
|
||||
|
||||
const messagebird = initClient(<string>MESSAGEBIRD_ACCESS_KEY);
|
||||
|
||||
const params: ConversationParameter = {
|
||||
to: toNumber,
|
||||
from: <string>MESSAGEBIRD_WHATSAPP_CHANNEL_ID,
|
||||
type: 'hsm',
|
||||
reportUrl: 'https://your.report.url',
|
||||
content: {
|
||||
hsm: {
|
||||
namespace: <string>MESSAGEBIRD_TEMPLATE_NAMESPACE_ID,
|
||||
templateName: <string>MESSAGEBIRD_TEMPLATE_NAME_TEST,
|
||||
language: {
|
||||
code: 'en',
|
||||
policy: 'deterministic',
|
||||
},
|
||||
components: [
|
||||
{
|
||||
type: 'body',
|
||||
parameters: [{type: 'text', text: 'Variable 1'}],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
messagebird.conversations.send(params, async (err, response) => {
|
||||
if (err) {
|
||||
Logger.error(err);
|
||||
const databaseLog: HydratedDocument<IDatabaseLog> = new DatabaseLog({
|
||||
type: 'message',
|
||||
date: new Date(),
|
||||
level: 'error',
|
||||
details: {
|
||||
channel: 'whatsapp',
|
||||
message: 'No message was sent',
|
||||
status: 'ERROR',
|
||||
response: {...err, recipient: toNumber},
|
||||
},
|
||||
});
|
||||
|
||||
await databaseLog.save();
|
||||
} else {
|
||||
console.log('response', response);
|
||||
Logger.info(response);
|
||||
|
||||
/**
|
||||
* Save the message to the database using the log model
|
||||
*/
|
||||
const databaseLog: HydratedDocument<IDatabaseLog> = new DatabaseLog({
|
||||
type: 'message',
|
||||
date: new Date(),
|
||||
level: 'info',
|
||||
details: {
|
||||
channel: 'whatsapp',
|
||||
message: <string>MESSAGEBIRD_TEMPLATE_NAME_TEST,
|
||||
status: 'SUCCESS',
|
||||
response: {...response, recipient: toNumber},
|
||||
},
|
||||
});
|
||||
await databaseLog.save();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
export {sendWhatsappMessageWithMessagebird};
|
||||
|
|
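// Usage sketch, not part of the committed file: the import path and the recipient number
// (E.164 format) are placeholder assumptions; the MESSAGEBIRD_* variables referenced above
// must be present in the environment.
import {sendWhatsappMessageWithMessagebird} from './messagebird.service';

sendWhatsappMessageWithMessagebird('+393331234567');
// The helper is fire-and-forget: the outcome (success or error) is persisted to the
// DatabaseLog collection rather than returned to the caller.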
@ -0,0 +1,198 @@
|
|||
import axios from 'axios';
|
||||
import PdfPrinter from 'pdfmake';
|
||||
import {Storage} from '@google-cloud/storage';
|
||||
import slugify from 'slugify';
|
||||
import {format} from 'util';
|
||||
|
||||
import {TDocumentDefinitions} from 'pdfmake/interfaces';
|
||||
import {IUploadResponse} from '../upload/upload.service';
|
||||
|
||||
/**
|
||||
* Define the storage bucket
|
||||
*/
|
||||
const storage = new Storage();
|
||||
const bucket = storage.bucket(<string>process.env.GOOGLE_STORAGE_BUCKET_NAME);
|
||||
|
||||
/**
|
||||
* Define the interface for the pdf object
|
||||
*/
|
||||
export interface IPDFObject {
|
||||
key: string;
|
||||
}
|
||||
|
||||
const generatePDF = async (
|
||||
body: IPDFObject,
|
||||
directory: string
|
||||
): Promise<IUploadResponse> => {
|
||||
/**
|
||||
* Destructure the body
|
||||
*/
|
||||
const {key} = body;
|
||||
|
||||
/**
|
||||
* Define some constants
|
||||
*/
|
||||
const TODAY_DATE = new Intl.DateTimeFormat('it-IT').format(new Date());
|
||||
const COMPANY_NAME = 'Company Name'; // replace with your own company name
|
||||
const COMPANY_LOGO = `https://storage.googleapis.com/${process.env.GOOGLE_STORAGE_BUCKET_NAME}/company-logo.png`;
|
||||
const SERVICE_FOLDER = 'express-typescript-api-rest'; // replace with your own service folder name
|
||||
|
||||
/**
|
||||
* Get the logo image from the url
|
||||
*/
|
||||
const LOGO_IMAGE_URL = await axios
|
||||
.get(COMPANY_LOGO, {responseType: 'arraybuffer'})
|
||||
.then(res => res.data);
|
||||
|
||||
/**
|
||||
* return the array buffer for pdfmake
|
||||
*/
|
||||
const LOGO_IMAGE_BASE_64 = `data:image/png;base64,${Buffer.from(
|
||||
LOGO_IMAGE_URL
|
||||
).toString('base64')}`;
|
||||
|
||||
/**
|
||||
* Define the fonts
|
||||
*/
|
||||
const fonts = {
|
||||
Courier: {
|
||||
normal: 'Courier',
|
||||
bold: 'Courier-Bold',
|
||||
italics: 'Courier-Oblique',
|
||||
bolditalics: 'Courier-BoldOblique',
|
||||
},
|
||||
Helvetica: {
|
||||
normal: 'Helvetica',
|
||||
bold: 'Helvetica-Bold',
|
||||
italics: 'Helvetica-Oblique',
|
||||
bolditalics: 'Helvetica-BoldOblique',
|
||||
},
|
||||
Times: {
|
||||
normal: 'Times-Roman',
|
||||
bold: 'Times-Bold',
|
||||
italics: 'Times-Italic',
|
||||
bolditalics: 'Times-BoldItalic',
|
||||
},
|
||||
Symbol: {
|
||||
normal: 'Symbol',
|
||||
},
|
||||
ZapfDingbats: {
|
||||
normal: 'ZapfDingbats',
|
||||
},
|
||||
};
|
||||
|
||||
// instantiate PDFMake
|
||||
const printer = new PdfPrinter(fonts);
|
||||
|
||||
// set a general font size
|
||||
const fontSize = 12;
|
||||
|
||||
/**
|
||||
* Define the document definition
|
||||
*/
|
||||
const docDefinition: TDocumentDefinitions = {
|
||||
info: {
|
||||
title: 'PDF Document',
|
||||
author: 'Author Name',
|
||||
subject: 'Subject',
|
||||
keywords: 'Keywords',
|
||||
},
|
||||
header: (currentPage, pageCount, pageSize) => {
|
||||
return [
|
||||
{
|
||||
text: `Header: ${new Intl.DateTimeFormat('it-IT').format(
|
||||
new Date()
|
||||
)} - ${key}`,
|
||||
alignment: currentPage % 2 ? 'right' : 'left', // alternate alignment on odd/even pages
|
||||
fontSize: fontSize - 4,
|
||||
lineHeight: 1.2,
|
||||
margin: [20, 20, 30, 20],
|
||||
},
|
||||
{
|
||||
canvas: [
|
||||
{type: 'rect', x: 170, y: 32, w: pageSize.width - 170, h: 40},
|
||||
],
|
||||
},
|
||||
];
|
||||
},
|
||||
footer: (currentPage, pageCount, pageSize) => {
|
||||
// you can apply any logic and return any valid pdfmake element
|
||||
return [
|
||||
{
|
||||
text: 'This is a footer. You can apply any logic and return any valid pdfmake element',
|
||||
alignment: 'center',
|
||||
fontSize: fontSize - 6,
|
||||
lineHeight: 1.2,
|
||||
margin: [10, 10, 10, 10],
|
||||
},
|
||||
{
|
||||
canvas: [
|
||||
{type: 'rect', x: 170, y: 32, w: pageSize.width - 170, h: 40},
|
||||
],
|
||||
},
|
||||
];
|
||||
},
|
||||
content: [
|
||||
{
|
||||
image: LOGO_IMAGE_BASE_64,
|
||||
width: 150,
|
||||
},
|
||||
{
|
||||
text: `Some text here ${TODAY_DATE}`,
|
||||
fontSize: fontSize - 2,
|
||||
lineHeight: 1.3,
|
||||
margin: [10, 30, 10, 10],
|
||||
alignment: 'right',
|
||||
bold: true,
|
||||
},
|
||||
],
|
||||
defaultStyle: {
|
||||
font: 'Helvetica',
|
||||
},
|
||||
};
|
||||
|
||||
// createPdfKitDocument already produces a stream, so we don't need to create a new one
|
||||
const pdfBuffer = printer.createPdfKitDocument(docDefinition);
|
||||
|
||||
pdfBuffer.end();
|
||||
|
||||
/**
|
||||
* Define the file name
|
||||
*/
|
||||
const fileName = `FileName_${COMPANY_NAME.replace(/ /g, '_')}.pdf`;
|
||||
|
||||
/**
|
||||
* Finally, pipe the PDF stream to the bucket and return a promise that resolves with the public URL and the blob name
|
||||
*/
|
||||
return new Promise((resolve, reject) => {
|
||||
const blob = bucket.file(
|
||||
`${SERVICE_FOLDER}/${directory}/${slugify(fileName)}`
|
||||
);
|
||||
|
||||
const blobStream = pdfBuffer.pipe(
|
||||
blob.createWriteStream({
|
||||
resumable: false,
|
||||
public: true,
|
||||
metadata: {
|
||||
contentType: 'application/pdf',
|
||||
cacheControl: 'no-store',
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
blobStream
|
||||
.on('finish', () => {
|
||||
const blobName = blob.name;
|
||||
const publicUrl = format(
|
||||
`https://storage.googleapis.com/${bucket.name}/${blob.name}`
|
||||
);
|
||||
|
||||
resolve({publicUrl, blobName});
|
||||
})
|
||||
.on('error', error => {
|
||||
reject(error || 'unable to upload file');
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
export {generatePDF};
|
||||
|
|
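// Usage sketch, not part of the committed file: the import path, key and directory are
// placeholder assumptions. generatePDF resolves with the public URL and blob name of the
// document uploaded to the bucket configured via GOOGLE_STORAGE_BUCKET_NAME.
import {generatePDF} from './pdf.service';

const createInvoicePdf = async () => {
  const {publicUrl, blobName} = await generatePDF({key: 'INV-2023-001'}, 'invoices');
  return {publicUrl, blobName};
};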
@ -0,0 +1,224 @@
|
|||
import {Storage} from '@google-cloud/storage';
|
||||
import fs from 'fs';
|
||||
import slugify from 'slugify';
|
||||
import stream from 'stream';
|
||||
import {format} from 'util';
|
||||
import crypto from 'crypto';
|
||||
import {CustomError} from '../../errors';
|
||||
import Logger from '../../lib/logger';
|
||||
|
||||
export interface IUploadResponse {
|
||||
publicUrl: string;
|
||||
blobName: string;
|
||||
}
|
||||
|
||||
const storage = new Storage();
|
||||
const bucket = storage.bucket(<string>process.env.GOOGLE_STORAGE_BUCKET_NAME);
|
||||
/**
|
||||
* This function writes a buffer to a file on the local file system
* 0. Always pass a Buffer-like argument, otherwise it will fail
* 1. Takes a buffer and a base filename; a timestamp and .xml extension are appended
|
||||
* @param buffer
|
||||
* @param filename
|
||||
*/
|
||||
const streamBufferToLFS = async (
  buffer: Buffer,
  filename: string
): Promise<void> => {
  const file = `${filename}-${Date.now()}.xml`;
  try {
    // resolve only after the write has actually completed
    await fs.promises.writeFile(file, buffer);
    Logger.debug('The file was saved!');
  } catch (err) {
    Logger.error(err);
  }
};
|
||||
|
||||
/**
|
||||
* This function uploads a file directly to GCS from a Multer file object
* 0. To make this work, use the multer memory storage middleware
* 1. Only a file instance that carries a buffer will succeed
* 2. Returns a public URL
|
||||
* @param file
|
||||
* @returns
|
||||
*/
|
||||
const uploadFileToGCS = async (
|
||||
file: Express.Multer.File
|
||||
): Promise<IUploadResponse> => {
|
||||
const RANDOM_ID = Math.random().toString(36).substring(2, 15); // replace with your own id
|
||||
const SERVICE_FOLDER = 'express-typescript-api-rest'; // replace with your own service folder name
|
||||
const DIRECTORY = `uploads/${RANDOM_ID}`; // replace with your own directory name
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const {originalname, buffer, mimetype} = file;
|
||||
|
||||
const blob = bucket.file(
|
||||
`${SERVICE_FOLDER}/${DIRECTORY}/${slugify(originalname)}`
|
||||
);
|
||||
|
||||
const blobStream = blob.createWriteStream({
|
||||
resumable: false,
|
||||
public: true,
|
||||
predefinedAcl: 'publicRead',
|
||||
metadata: {
|
||||
contentType: mimetype,
|
||||
cacheControl: 'no-store',
|
||||
},
|
||||
});
|
||||
|
||||
blobStream
|
||||
.on('finish', () => {
|
||||
const blobName = blob.name;
|
||||
const publicUrl = format(
|
||||
`https://storage.googleapis.com/${bucket.name}/${blob.name}`
|
||||
);
|
||||
resolve({publicUrl, blobName});
|
||||
})
|
||||
.on('error', error => {
|
||||
reject(error || 'unable to upload file');
|
||||
})
|
||||
.end(buffer);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* This function takes a pure buffer and converts it to a stream
* 0. Always pass a Buffer-like argument, otherwise it will fail
* 1. Takes a buffer argument
* 2. Creates a stream to store in memory
* 3. Pipes the stream to Google Cloud Storage
* 4. As soon as the file is written, returns a public URL
* @returns {Promise<IUploadResponse>}
|
||||
* @param buffer
|
||||
*/
|
||||
const streamBufferToGCS = async (buffer: Buffer): Promise<IUploadResponse> => {
|
||||
const RANDOM_ID = Math.random().toString(36).substring(2, 15); // replace with your own id
|
||||
const SERVICE_FOLDER = 'express-typescript-api-rest'; // replace with your own service folder name
|
||||
const DIRECTORY = `uploads/${RANDOM_ID}`; // replace with your own directory name
|
||||
const FILE_NAME = 'test.xml'; // replace with your own file name
|
||||
|
||||
const dataStream = new stream.PassThrough();
|
||||
|
||||
dataStream.push(buffer);
|
||||
dataStream.push(null);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const blob = bucket.file(`${SERVICE_FOLDER}/${DIRECTORY}/${FILE_NAME}`);
|
||||
|
||||
const blobStream = dataStream.pipe(
|
||||
blob.createWriteStream({
|
||||
resumable: false,
|
||||
public: true,
|
||||
predefinedAcl: 'publicRead',
|
||||
metadata: {
|
||||
cacheControl: 'no-store',
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
blobStream
|
||||
.on('finish', () => {
|
||||
const publicUrl = format(
|
||||
`https://storage.googleapis.com/${bucket.name}/${blob.name}`
|
||||
);
|
||||
resolve({publicUrl, blobName: blob.name});
|
||||
})
|
||||
.on('error', error => {
|
||||
reject(error);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* This function takes a Multer file object that contains a buffer
* 0. Always pass an object that contains a buffer, otherwise it will fail
* 1. Also takes a directory-like argument
* 2. Creates a stream to store in memory
* 3. Pipes the stream to Google Cloud Storage
* 4. As soon as the file is written, returns a public URL
* @returns {Promise<IUploadResponse>}
|
||||
* @param file
|
||||
* @param {string} directory
|
||||
*/
|
||||
const streamFileToGCS = async (
|
||||
file: Express.Multer.File,
|
||||
directory: string
|
||||
): Promise<IUploadResponse> => {
|
||||
const SERVICE_FOLDER = 'express-typescript-api-rest'; // replace with your own service folder name
|
||||
|
||||
// destructuring data file object
|
||||
const {originalname, buffer, mimetype} = file;
|
||||
|
||||
// generate a random uuid to avoid duplicate file name
|
||||
const uuid = crypto.randomBytes(4).toString('hex');
|
||||
|
||||
// generate a file name
|
||||
const fileName = `${uuid} - ${originalname.replace(/ /g, '_')}`;
|
||||
|
||||
// Instantiate a stream to read the file buffer
|
||||
const dataStream = new stream.PassThrough();
|
||||
|
||||
dataStream.push(buffer);
|
||||
dataStream.push(null);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const blob = bucket.file(
|
||||
`${SERVICE_FOLDER}/${directory}/${slugify(fileName || uuid)}`
|
||||
);
|
||||
|
||||
const blobStream = dataStream.pipe(
|
||||
blob.createWriteStream({
|
||||
resumable: false,
|
||||
public: true,
|
||||
predefinedAcl: 'publicRead',
|
||||
metadata: {
|
||||
contentType: mimetype,
|
||||
cacheControl: 'no-store',
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
blobStream
|
||||
.on('finish', () => {
|
||||
const blobName = blob.name;
|
||||
const publicUrl = format(
|
||||
`https://storage.googleapis.com/${bucket.name}/${blob.name}`
|
||||
);
|
||||
resolve({publicUrl, blobName});
|
||||
})
|
||||
.on('error', error => {
|
||||
reject(error);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete a file from Google Cloud Storage, identified by its blob name
|
||||
* @param blobName
|
||||
* @returns
|
||||
*/
|
||||
const deleteFileFromGCS = async (blobName: string): Promise<void> => {
|
||||
try {
|
||||
await bucket.file(blobName).delete();
|
||||
} catch (e) {
|
||||
Logger.error(e);
|
||||
// console.log(e.toString());
|
||||
if (e instanceof CustomError) {
|
||||
throw new CustomError(
|
||||
404,
|
||||
`Failed to delete file ${blobName}: ${e.message}`
|
||||
);
|
||||
} else {
|
||||
throw new Error(`Failed to delete file ${blobName}`);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export {
|
||||
streamBufferToLFS,
|
||||
uploadFileToGCS,
|
||||
streamBufferToGCS,
|
||||
streamFileToGCS,
|
||||
deleteFileFromGCS,
|
||||
};
|
||||
|
|
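// Usage sketch, not part of the committed file: the route path, field name and import path
// are placeholder assumptions. As the JSDoc above notes, uploadFileToGCS needs a Multer
// file kept in memory, hence the memoryStorage engine.
import express from 'express';
import multer from 'multer';
import {uploadFileToGCS} from './upload.service';

const router = express.Router();
const upload = multer({storage: multer.memoryStorage()});

router.post('/files', upload.single('file'), async (req, res) => {
  if (!req.file) {
    return res.status(400).json({message: 'No file provided'});
  }
  const {publicUrl, blobName} = await uploadFileToGCS(req.file);
  return res.status(201).json({publicUrl, blobName});
});

export default router;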
@ -0,0 +1,90 @@
|
|||
import {create} from 'xmlbuilder2';
|
||||
import stream from 'stream';
|
||||
import {Storage} from '@google-cloud/storage';
|
||||
import crypto from 'crypto';
|
||||
import slugify from 'slugify';
|
||||
import {IUploadResponse} from '../upload/upload.service';
|
||||
|
||||
export interface IXMLObject {
|
||||
key: string;
|
||||
}
|
||||
|
||||
const storage = new Storage();
|
||||
const bucket = storage.bucket(<string>process.env.GOOGLE_STORAGE_BUCKET_NAME);
|
||||
|
||||
const generateXML = async (body: IXMLObject): Promise<IUploadResponse> => {
|
||||
const SERVICE_FOLDER = 'express-typescript-api-rest';
|
||||
const DIRECTORY = 'xml';
|
||||
const UUID = crypto.randomBytes(4).toString('hex');
|
||||
|
||||
const {key} = body;
|
||||
|
||||
const doc = create(
|
||||
{version: '1.0', encoding: 'UTF-8'},
|
||||
{
|
||||
// '?': 'xml-stylesheet type="text/xsl" href="https://storage.googleapis.com/your-bucket/assets/xml/stylesheet.xsl"',
|
||||
'p:MainXmlSubject': {
|
||||
'@': {
|
||||
'xmlns:ds': 'http://www.w3.org/2000/09/xmldsig#',
|
||||
'xmlns:p':
|
||||
'http://ivaservizi.agenziaentrate.gov.it/docs/xsd/fatture/v1.2',
|
||||
'xmlns:xsi': 'http://www.w3.org/2001/XMLSchema-instance',
|
||||
versione: 'FPR12',
|
||||
},
|
||||
Header: {
|
||||
SubHeader: {
|
||||
Key: {
|
||||
Value: 'value',
|
||||
},
|
||||
},
|
||||
},
|
||||
Body: {
|
||||
SubBody: {
|
||||
Key: {
|
||||
Value: 'value',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
).doc();
|
||||
|
||||
const xmlBuffer = doc.end({headless: true, prettyPrint: true});
|
||||
|
||||
const dataStream = new stream.PassThrough();
|
||||
|
||||
dataStream.push(xmlBuffer);
dataStream.push(null);
|
||||
|
||||
const fileName = `IT09568521000_${UUID}_${key}.xml`;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const blob = bucket.file(
|
||||
`${SERVICE_FOLDER}/${DIRECTORY}/${slugify(fileName)}` // fileName already carries the .xml extension
|
||||
);
|
||||
|
||||
const blobStream = dataStream.pipe(
|
||||
blob.createWriteStream({
|
||||
resumable: false,
|
||||
public: true,
|
||||
predefinedAcl: 'publicRead',
|
||||
metadata: {
|
||||
cacheControl: 'no-store',
|
||||
contentType: 'application/xml',
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
blobStream
|
||||
.on('finish', () => {
|
||||
const blobName = blob.name;
|
||||
const publicUrl = `https://storage.googleapis.com/${bucket.name}/${blob.name}`;
|
||||
resolve({publicUrl, blobName});
|
||||
})
|
||||
.on('error', error => {
|
||||
reject(error);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
export {generateXML};
|
||||
|
|
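// Usage sketch, not part of the committed file: the import path and key are placeholder
// assumptions. The key ends up in the generated file name, and the resolved value carries
// the public URL and blob name of the uploaded XML.
import {generateXML} from './xml.service';

const exportInvoiceXml = async () => {
  const {publicUrl, blobName} = await generateXML({key: 'invoice-42'});
  return {publicUrl, blobName};
};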
@ -0,0 +1,19 @@
|
|||
import mongoose from 'mongoose';
|
||||
// eslint-disable-next-line node/no-unpublished-import
|
||||
import request from 'supertest';
|
||||
import app from '../index';
|
||||
|
||||
/**
|
||||
* Test to see if the server is running
|
||||
*/
|
||||
describe(`GET /api/v1/${process.env.SERVICE_NAME}`, () => {
|
||||
test('should return 200 OK', async () => {
|
||||
const res = await request(app).get(`/api/v1/${process.env.SERVICE_NAME}`);
|
||||
expect(res.statusCode).toEqual(200);
|
||||
});
|
||||
afterAll(done => {
|
||||
// Closing the DB connection allows Jest to exit successfully.
|
||||
mongoose.connection.close();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
|
@ -0,0 +1,99 @@
|
|||
import {
|
||||
formatDateToITLocale,
|
||||
generateDateRangeArray,
|
||||
getDaysCountBetweenDates,
|
||||
getFormattedDate,
|
||||
getMonthDaysCount,
|
||||
getMonthsCountBetweenDates,
|
||||
isDateToday,
|
||||
} from '../../utils/dates.utils';
|
||||
|
||||
describe('Date Utilities Tests Suite', () => {
|
||||
describe('generateDateRangeArray', () => {
|
||||
it('should return an empty array when no start or end date is provided', () => {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
const result = generateDateRangeArray(null, null);
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('should generate an array of dates between the start and end dates', () => {
|
||||
const startDate = new Date('2023-02-01');
|
||||
const endDate = new Date('2023-02-05');
|
||||
const result = generateDateRangeArray(startDate, endDate);
|
||||
expect(result.length).toEqual(5);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getMonthDaysCount', () => {
|
||||
it('should return the correct number of days in the specified month', () => {
|
||||
const result = getMonthDaysCount(2, 2023);
|
||||
expect(result).toEqual(28);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getDaysCountBetweenDates', () => {
|
||||
it('should return the correct number of days between two dates', () => {
|
||||
const startDate = new Date('2023-02-01');
|
||||
const endDate = new Date('2023-02-05');
|
||||
const result = getDaysCountBetweenDates(startDate, endDate);
|
||||
expect(result).toEqual(4);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getMonthsCountBetweenDates', () => {
|
||||
it('should return the correct number of months between two dates', () => {
|
||||
const startDate = new Date('2022-12-01');
|
||||
const endDate = new Date('2023-02-01');
|
||||
const result = getMonthsCountBetweenDates(startDate, endDate);
|
||||
expect(result).toEqual(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('formatDateToITLocale', () => {
|
||||
it('should return a formatted date in Italian locale', () => {
|
||||
const date = new Date('2023-02-20');
|
||||
const result = formatDateToITLocale(date);
|
||||
expect(result).toEqual('20/02/2023');
|
||||
});
|
||||
});
|
||||
|
||||
describe('isDateToday', () => {
|
||||
it('should return true when the date is today', () => {
|
||||
const date = new Date();
|
||||
const result = isDateToday(date);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
|
||||
it('should return false when the date is not today', () => {
|
||||
const today = new Date();
|
||||
const nonTodayDate = new Date(
|
||||
today.getFullYear(),
|
||||
today.getMonth(),
|
||||
today.getDate() - 1
|
||||
);
|
||||
const result = isDateToday(nonTodayDate);
|
||||
expect(result).toEqual(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFormattedDate', () => {
|
||||
it('should return an empty string if the input date is not valid', () => {
|
||||
const invalidDate = new Date('invalid');
|
||||
const result = getFormattedDate(invalidDate);
|
||||
expect(result).toEqual('');
|
||||
});
|
||||
|
||||
it('should return a formatted date in the specified format', () => {
|
||||
const date = new Date('2023-02-20');
|
||||
const result = getFormattedDate(date, 'dd/MM/yyyy');
|
||||
expect(result).toEqual('20/02/2023');
|
||||
});
|
||||
|
||||
it('should return a formatted date in the default format if no format is specified', () => {
|
||||
const date = new Date('2023-02-20');
|
||||
const result = getFormattedDate(date);
|
||||
expect(result).toEqual('2023-02-20');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -0,0 +1,54 @@
|
|||
// eslint-disable-next-line node/no-unpublished-import
|
||||
import request from 'supertest';
|
||||
import jwt from 'jsonwebtoken';
|
||||
import express from 'express';
|
||||
import {ICustomExpressRequest} from '../../middlewares/currentUser.middleware';
|
||||
import {
|
||||
generateOTP,
|
||||
generateCookie,
|
||||
generateJsonWebToken,
|
||||
JwtPayload,
|
||||
} from '../../../src/utils/generators.utils';
|
||||
|
||||
// Mock process.env.JWT_KEY with a string value
|
||||
process.env.JWT_KEY = 'mock_jwt_key';
|
||||
|
||||
describe('Generators Utilities Tests Suite', () => {
|
||||
describe('generateOTP', () => {
|
||||
it('should generate a 6 digit OTP', () => {
|
||||
const otp = generateOTP();
|
||||
expect(otp).toHaveLength(6);
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateJsonWebToken', () => {
|
||||
it('should generate a JWT token', () => {
|
||||
const payload = {id: 1, username: 'user1'};
|
||||
const token = generateJsonWebToken(payload);
|
||||
const decoded = jwt.verify(token, process.env.JWT_KEY!) as JwtPayload<
|
||||
typeof payload
|
||||
>;
|
||||
expect(decoded.payload).toEqual(payload);
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateCookie', () => {
|
||||
it('should set a cookie with the given name and token', async () => {
|
||||
const app = express();
|
||||
const cookieName = 'my-cookie';
|
||||
const token = 'my-token';
|
||||
|
||||
app.get('/set-cookie', (req, res) => {
|
||||
generateCookie(cookieName, token, req as ICustomExpressRequest, res);
|
||||
res.status(200).send('Cookie set');
|
||||
});
|
||||
|
||||
const response = await request(app).get('/set-cookie');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.header['set-cookie']).toBeDefined();
|
||||
expect(response.header['set-cookie'][0]).toContain(cookieName);
|
||||
expect(response.header['set-cookie'][0]).toContain(token);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -0,0 +1,43 @@
|
|||
import {cleanObject} from '../../utils/objects.utils';
|
||||
|
||||
describe('cleanObject', () => {
|
||||
it('should remove null and undefined values from an object and its nested objects', () => {
|
||||
const input = {
|
||||
a: 1,
|
||||
b: null,
|
||||
c: {
|
||||
d: 'hello',
|
||||
e: null,
|
||||
f: {
|
||||
g: 2,
|
||||
h: undefined,
|
||||
},
|
||||
},
|
||||
d: [
|
||||
{
|
||||
a: 1,
|
||||
b: [null, undefined, 1, 2, 3],
|
||||
},
|
||||
{
|
||||
b: null,
|
||||
},
|
||||
],
|
||||
};
|
||||
const expectedOutput = {
|
||||
a: 1,
|
||||
c: {
|
||||
d: 'hello',
|
||||
f: {
|
||||
g: 2,
|
||||
},
|
||||
},
|
||||
d: [
|
||||
{
|
||||
a: 1,
|
||||
b: [1, 2, 3],
|
||||
},
|
||||
],
|
||||
};
|
||||
expect(cleanObject(input)).toEqual(expectedOutput);
|
||||
});
|
||||
});
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
declare module 'xss-clean' {
|
||||
const value: Function;
|
||||
|
||||
export default value;
|
||||
}
|
||||
|
|
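// Usage sketch, not part of the committed file: with the declaration above in place, the
// untyped 'xss-clean' package can be imported and registered like any other Express
// middleware. The wiring below is illustrative only.
import express from 'express';
import xss from 'xss-clean';

const app = express();
app.use(express.json());
app.use(xss()); // sanitise request body, query and params against XSS payloads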
@ -0,0 +1,42 @@
|
|||
import {Response} from 'express';
|
||||
import {IUserMethods} from '../api/v1/user/user.model';
|
||||
import {ICustomExpressRequest} from '../middlewares/currentUser.middleware';
|
||||
|
||||
/**
|
||||
*
|
||||
* This function returns a JSON response with the user data, the token
* and the status, and sets a cookie named 'jwt'. We use it in the
* response of login or signup.
* @param user
|
||||
* @param statusCode
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
const createCookieFromToken = (
|
||||
user: IUserMethods,
|
||||
statusCode: number,
|
||||
req: ICustomExpressRequest,
|
||||
res: Response
|
||||
) => {
|
||||
const token = user.generateVerificationToken();
|
||||
|
||||
const cookieOptions = {
|
||||
expires: new Date(Date.now() + 24 * 60 * 60 * 1000),
|
||||
httpOnly: true,
|
||||
secure: req.secure || req.headers['x-forwarded-proto'] === 'https',
|
||||
};
|
||||
|
||||
res.cookie('jwt', token, cookieOptions);
|
||||
|
||||
res.status(statusCode).json({
|
||||
status: 'success',
|
||||
token,
|
||||
token_expires: cookieOptions.expires,
|
||||
data: {
|
||||
user,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
export default createCookieFromToken;
|
||||
|
|
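// Usage sketch, not part of the committed file: the import paths, the User model default
// export and the assumption that a hydrated user document satisfies IUserMethods are all
// illustrative; only the helper's signature comes from the file above.
import {Response} from 'express';
import User from '../api/v1/user/user.model';
import {ICustomExpressRequest} from '../middlewares/currentUser.middleware';
import createCookieFromToken from './createCookieFromToken';

const loginHandler = async (req: ICustomExpressRequest, res: Response) => {
  const user = await User.findOne({email: req.body.email});
  if (!user) {
    return res.status(401).json({status: 'fail', message: 'Invalid credentials'});
  }
  // Sets the 'jwt' cookie and sends the JSON payload in a single call
  return createCookieFromToken(user, 200, req, res);
};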
@ -0,0 +1,132 @@
|
|||
/**
|
||||
* Generates an array of dates between the start and end dates
|
||||
* @param startDate
|
||||
* @param endDate
|
||||
* @returns
|
||||
*/
|
||||
const generateDateRangeArray = (startDate: Date, endDate: Date) => {
|
||||
let dates: Date[] = [];
|
||||
|
||||
if (!startDate || !endDate) {
|
||||
return dates;
|
||||
}
|
||||
|
||||
// to avoid modifying the original date
|
||||
const currentDate = new Date(startDate);
|
||||
|
||||
while (currentDate < new Date(endDate)) {
|
||||
dates = [...dates, new Date(currentDate)];
|
||||
currentDate.setDate(currentDate.getDate() + 1);
|
||||
}
|
||||
dates = [...dates, new Date(endDate)];
|
||||
return dates;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the number of days in a month
|
||||
* @param month
|
||||
* @param year
|
||||
* @returns
|
||||
*/
|
||||
const getMonthDaysCount = (month: number, year: number): number => {
|
||||
return new Date(year, month, 0).getDate();
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the number of days between two dates
|
||||
* @param startDateObj
|
||||
* @param endDateObj
|
||||
* @returns
|
||||
*/
|
||||
const getDaysCountBetweenDates = (startDateObj: Date, endDateObj: Date) => {
|
||||
const MILLISECONDS_PER_DAY = 24 * 60 * 60 * 1000;
|
||||
const startDate = new Date(startDateObj).setHours(0, 0, 0, 0);
|
||||
const endDate = new Date(endDateObj).setHours(0, 0, 0, 0);
|
||||
|
||||
const timeDiff = Math.abs(startDate - endDate);
|
||||
const daysDiff = Math.ceil(timeDiff / MILLISECONDS_PER_DAY);
|
||||
|
||||
return daysDiff;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the number of months between two dates
|
||||
* @param startDateObj
* @param endDateObj
|
||||
* @returns
|
||||
*/
|
||||
const getMonthsCountBetweenDates = (startDateObj: Date, endDateObj: Date) => {
|
||||
const startDate = new Date(startDateObj);
|
||||
const endDate = new Date(endDateObj);
|
||||
|
||||
const startYear = startDate.getFullYear();
|
||||
const startMonth = startDate.getMonth();
|
||||
|
||||
const endYear = endDate.getFullYear();
|
||||
const endMonth = endDate.getMonth();
|
||||
|
||||
const monthsDiff = (endYear - startYear) * 12 + (endMonth - startMonth);
|
||||
|
||||
return Math.abs(monthsDiff);
|
||||
};
|
||||
|
||||
/**
|
||||
* Formats a date using the Italian (it-IT) locale, e.g. 20/02/2023
|
||||
* @param date
|
||||
* @returns
|
||||
*/
|
||||
const formatDateToITLocale = (date: Date) => {
|
||||
return new Intl.DateTimeFormat('it-IT', {
|
||||
year: 'numeric',
|
||||
month: '2-digit',
|
||||
day: '2-digit',
|
||||
}).format(date);
|
||||
};
|
||||
|
||||
/**
|
||||
* Checks if a date is today
|
||||
* @param date
|
||||
* @returns
|
||||
*/
|
||||
const isDateToday = (date: Date) => {
|
||||
const today = new Date();
|
||||
return (
|
||||
date.getDate() === today.getDate() &&
|
||||
date.getMonth() === today.getMonth() &&
|
||||
date.getFullYear() === today.getFullYear()
|
||||
);
|
||||
};
|
||||
|
||||
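/**
* Formats a date according to a yyyy/MM/dd-style pattern (defaults to 'yyyy-MM-dd')
* @param date
* @param format
* @returns the formatted string, or an empty string when the date is invalid
*/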
const getFormattedDate = (date: Date, format = 'yyyy-MM-dd') => {
|
||||
if (isNaN(date.getTime())) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const dateFormatter = new Intl.DateTimeFormat('en-US', {
|
||||
year: 'numeric',
|
||||
month: '2-digit',
|
||||
day: '2-digit',
|
||||
});
|
||||
|
||||
const formattedDate = dateFormatter.format(date);
|
||||
|
||||
const formattedDateParts = formattedDate.split('/');
|
||||
const year = formattedDateParts[2];
|
||||
const month = formattedDateParts[0];
|
||||
const day = formattedDateParts[1];
|
||||
|
||||
return format
|
||||
.replace(/yyyy/g, year)
|
||||
.replace(/MM/g, month)
|
||||
.replace(/dd/g, day);
|
||||
};
|
||||
|
||||
export {
|
||||
generateDateRangeArray,
|
||||
getMonthDaysCount,
|
||||
getDaysCountBetweenDates,
|
||||
getMonthsCountBetweenDates,
|
||||
formatDateToITLocale,
|
||||
isDateToday,
|
||||
getFormattedDate,
|
||||
};
|
||||
|
|
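// Usage sketch, not part of the committed file: a few representative calls with the values
// the test suite above asserts; the import path is an assumption.
import {
  formatDateToITLocale,
  generateDateRangeArray,
  getDaysCountBetweenDates,
  getFormattedDate,
} from './dates.utils';

const range = generateDateRangeArray(new Date('2023-02-01'), new Date('2023-02-05'));
// range.length === 5 (both endpoints are included)

const days = getDaysCountBetweenDates(new Date('2023-02-01'), new Date('2023-02-05'));
// days === 4

const itDate = formatDateToITLocale(new Date('2023-02-20')); // '20/02/2023'
const isoLike = getFormattedDate(new Date('2023-02-20')); // '2023-02-20'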
@ -0,0 +1,66 @@
|
|||
import crypto from 'crypto';
|
||||
import {ICustomExpressRequest} from '../middlewares/currentUser.middleware';
|
||||
import {Response} from 'express';
|
||||
import jwt from 'jsonwebtoken';
|
||||
|
||||
export interface JwtPayload<T> {
|
||||
[key: string]: T;
|
||||
}
|
||||
/**
|
||||
* Generate a json web token
|
||||
* @param payload
|
||||
* @returns
|
||||
*/
|
||||
const generateJsonWebToken = <T>(payload: JwtPayload<T>): string => {
|
||||
const jwtKey = process.env.JWT_KEY;
|
||||
|
||||
if (!jwtKey) {
|
||||
throw new Error('Missing JWT');
|
||||
}
|
||||
|
||||
return jwt.sign({payload}, jwtKey, {
|
||||
expiresIn: '10d',
|
||||
// algorithm: 'RS256',
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Generate a cookie with a token
|
||||
* @param cookieName
|
||||
* @param token
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
const generateCookie = (
|
||||
cookieName: string,
|
||||
token: string,
|
||||
req: ICustomExpressRequest,
|
||||
res: Response
|
||||
) => {
|
||||
const cookieOptions = {
|
||||
expires: new Date(Date.now() + 10 * 24 * 60 * 60 * 1000),
|
||||
httpOnly: true,
|
||||
secure: req.secure || req.headers['x-forwarded-proto'] === 'https',
|
||||
};
|
||||
|
||||
res.cookie(cookieName, token, cookieOptions);
|
||||
};
|
||||
|
||||
/**
|
||||
* Generate a random OTP
|
||||
* @returns
|
||||
*/
|
||||
const generateOTP = (): string => {
|
||||
const chars = '0123456789';
|
||||
let otp = '';
|
||||
|
||||
while (otp.length < 6) {
|
||||
const randomBytes = crypto.randomBytes(4);
|
||||
const randomIndex = randomBytes.readUInt32BE(0) % chars.length;
|
||||
otp += chars.charAt(randomIndex);
|
||||
}
|
||||
|
||||
return otp;
|
||||
};
|
||||
|
||||
export {generateOTP, generateCookie, generateJsonWebToken};
|
||||
|
|
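// Usage sketch, not part of the committed file: the handler shape and import paths are
// placeholder assumptions; JWT_KEY must be set for generateJsonWebToken to succeed.
import {Response} from 'express';
import {ICustomExpressRequest} from '../middlewares/currentUser.middleware';
import {generateOTP, generateCookie, generateJsonWebToken} from './generators.utils';

const issueSession = (req: ICustomExpressRequest, res: Response) => {
  const otp = generateOTP(); // six numeric characters, e.g. '084213'
  const token = generateJsonWebToken({id: 'user-1', otp}); // expires in 10 days
  generateCookie('jwt', token, req, res); // httpOnly cookie with the same 10 day horizon
  res.status(200).json({status: 'success'});
};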
@ -0,0 +1,11 @@
|
|||
import cleanDeep from 'clean-deep';
|
||||
|
||||
interface IObjectWithNulls {
|
||||
[key: string]: unknown | null | IObjectWithNulls;
|
||||
}
|
||||
|
||||
const cleanObject = (obj: IObjectWithNulls): IObjectWithNulls => {
|
||||
return cleanDeep(obj);
|
||||
};
|
||||
|
||||
export {cleanObject};
|
||||
|
|
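// Usage sketch, not part of the committed file: cleanObject delegates to clean-deep, which
// strips null/undefined values (and the empty objects left behind) recursively, as the test
// above asserts.
import {cleanObject} from './objects.utils';

const sanitized = cleanObject({name: 'Ada', middleName: null, address: {city: undefined}});
// => {name: 'Ada'}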
@ -0,0 +1,17 @@
|
|||
{
|
||||
"extends": "./node_modules/gts/tsconfig-google.json",
|
||||
"compilerOptions": {
|
||||
"rootDir": ".",
|
||||
"outDir": "build",
|
||||
"esModuleInterop": true,
|
||||
"experimentalDecorators": true,
|
||||
"emitDecoratorMetadata": true,
|
||||
"resolveJsonModule": true,
|
||||
"noImplicitReturns": false,
|
||||
"paths": {
|
||||
"*": ["./node_modules/*", "./src/types/*"]
|
||||
}
|
||||
},
|
||||
"include": ["src/**/*.ts", "test/**/*.ts"],
|
||||
"exclude": ["node_modules", "build", "docs", "**/*.test.ts"]
|
||||
}
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
{
|
||||
// Comments are supported, like tsconfig.json
|
||||
"entryPoints": [
|
||||
"src/index.ts",
|
||||
"src/api/v1/app/app.controller.ts",
|
||||
"src/middlewares/currentUser.middleware.ts"
|
||||
],
|
||||
"exclude": ["**/node_modules/**", "**/*.spec.ts", "**/*.test.ts", "dist"],
|
||||
|
||||
"out": "docs"
|
||||
}
|
||||