Compare commits
1 Commit
Author | SHA1 | Date
---|---|---
Agusti Fernandez Pardo | 4afb2946a7 |
@@ -1,8 +0,0 @@
# Changesets

Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works
with multi-package repos, or single-package repos to help you version and publish your code. You can
find the full documentation for it [in our repository](https://github.com/changesets/changesets)

We have a quick list of common questions to get you started engaging with this project in
[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)
@@ -1,11 +0,0 @@
{
  "$schema": "https://unpkg.com/@changesets/config@2.3.0/schema.json",
  "changelog": "@changesets/cli/changelog",
  "commit": false,
  "fixed": [],
  "linked": [],
  "access": "public",
  "baseBranch": "main",
  "updateInternalDependencies": "patch",
  "ignore": []
}
@@ -1,7 +1,6 @@
# ********** INDEX **********
#
# - APP STORE
# - BASECAMP
# - DAILY.CO VIDEO
# - GOOGLE CALENDAR/MEET/LOGIN
# - HUBSPOT
@@ -12,37 +11,17 @@
# - ZOOM
# - GIPHY
# - VITAL
# - ZAPIER
# - LARK
# - WEB3
# - SALESFORCE
# - ZOHOCRM
# - ZOHO_BIGIN

# - APP STORE **********************************************************************************************
# ⚠️ ⚠️ ⚠️ THESE WILL BE MIGRATED TO THE DATABASE TO PREVENT AWS's 4KB ENV QUOTA ⚠️ ⚠️ ⚠️

# - BASECAMP
# Used to enable Basecamp integration with Cal.com
# @see https://github.com/calcom/cal.com#obtaining-basecamp-client-id-and-secret
BASECAMP3_CLIENT_ID=
BASECAMP3_CLIENT_SECRET=
BASECAMP3_USER_AGENT=

# - DAILY.CO VIDEO
# Enables Cal Video. To get your key:
# 1. Visit our [Daily.co Partnership Form](https://go.cal.com/daily) and enter your information
# 2. From within your dashboard, go to the [developers](https://dashboard.daily.co/developers) tab.
# @see https://github.com/calcom/cal.com#obtaining-daily-api-credentials

DAILY_API_KEY=
DAILY_SCALE_PLAN=''

# - GOOGLE CALENDAR/MEET/LOGIN
# Needed to enable Google Calendar integration and Login with Google
# @see https://github.com/calcom/cal.com#obtaining-the-google-api-credentials
GOOGLE_API_CREDENTIALS=

# @see https://github.com/calendso/calendso#obtaining-the-google-api-credentials
GOOGLE_API_CREDENTIALS='{}'
# To enable Login with Google you need to:
# 1. Set `GOOGLE_API_CREDENTIALS` above
# 2. Set `GOOGLE_LOGIN_ENABLED` to `true`
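For context (not part of the diff itself): `GOOGLE_API_CREDENTIALS` is expected to hold the OAuth client JSON downloaded from the Google Cloud console, which is why the placeholder default is `'{}'`. A minimal TypeScript sketch of how such a value might be read; the helper name and the credential shape are assumptions, not code from this repository:

```ts
// Hypothetical helper: read and validate the Google OAuth client JSON from the environment.
type GoogleCredentials = {
  web?: { client_id: string; client_secret: string; redirect_uris: string[] };
};

function readGoogleCredentials(): GoogleCredentials | null {
  const raw = process.env.GOOGLE_API_CREDENTIALS;
  if (!raw || raw === "{}") return null; // integration left unconfigured
  try {
    return JSON.parse(raw) as GoogleCredentials;
  } catch {
    throw new Error("GOOGLE_API_CREDENTIALS is set but is not valid JSON");
  }
}
```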
@@ -102,28 +81,4 @@ VITAL_WEBHOOK_SECRET=
VITAL_DEVELOPMENT_MODE="sandbox"
# "us" | "eu"
VITAL_REGION="us"

# - ZAPIER
# Used for the Zapier integration
# @see https://github.com/calcom/cal.com/blob/main/packages/app-store/zapier/README.md
ZAPIER_INVITE_LINK=""

# - LARK
# Needed to enable Lark Calendar integration and Login with Lark
# @see <https://open.larksuite.com/document/ukTMukTMukTM/ukDNz4SO0MjL5QzM/g>
LARK_OPEN_APP_ID=""
LARK_OPEN_APP_SECRET=""
LARK_OPEN_VERIFICATION_TOKEN=""

# - SALESFORCE
# Used for the Salesforce (Sales Cloud) app
SALESFORCE_CONSUMER_KEY=""
SALESFORCE_CONSUMER_SECRET=""

# - ZOHOCRM
# Used for the Zoho CRM integration
ZOHOCRM_CLIENT_ID=""
ZOHOCRM_CLIENT_SECRET=""


# *********************************************************************************************************
193 .env.example
@@ -1,146 +1,84 @@
# ********** INDEX **********
#
# - LICENSE (DEPRECATED)
# - LICENSE
# - DATABASE
# - SHARED
# - NEXTAUTH
# - E-MAIL SETTINGS
# - ORGANIZATIONS

# - LICENSE (DEPRECATED) ************************************************************************************
# https://github.com/calcom/cal.com/blob/main/LICENSE
# - LICENSE *************************************************************************************************
# Set this value to 'agree' to accept our license:
# LICENSE: https://github.com/calendso/calendso/blob/main/LICENSE
#
# Summary of terms:
# - The codebase has to stay open source, whether it was modified or not
# - You can not repackage or sell the codebase
# - Acquire a commercial license to remove these terms by visiting: cal.com/sales
#
# To enable enterprise-only features, as an admin, go to /auth/setup to select your license and follow
# instructions. This environment variable is deprecated although still supported for backward compatibility.
# @see https://console.cal.com
CALCOM_LICENSE_KEY=
NEXT_PUBLIC_LICENSE_CONSENT=''
# ***********************************************************************************************************

# - DATABASE ************************************************************************************************
DATABASE_URL="postgresql://postgres:@localhost:5450/calendso"
UPSTASH_REDIS_REST_URL=
UPSTASH_REDIS_REST_TOKEN=

# Uncomment to enable a dedicated connection pool for Prisma using Prisma Data Proxy
# Cold boots will be faster and you'll be able to scale your DB independently of your app.
# @see https://www.prisma.io/docs/data-platform/data-proxy/use-data-proxy
# PRISMA_GENERATE_DATAPROXY=true
PRISMA_GENERATE_DATAPROXY=

# ⚠️ ⚠️ ⚠️ DATABASE_URL got moved to `packages/prisma/.env.example` ⚠️ ⚠️ ⚠️
# ***********************************************************************************************************

# - SHARED **************************************************************************************************
# Set this to http://app.cal.local:3000 if you want to enable organizations, and
# check variable ORGANIZATIONS_ENABLED at the bottom of this file
NEXT_PUBLIC_WEBAPP_URL='http://localhost:3000'
# Change to 'http://localhost:3001' if running the website simultaneously
NEXT_PUBLIC_WEBSITE_URL='http://localhost:3000'
NEXT_PUBLIC_CONSOLE_URL='http://localhost:3004'
NEXT_PUBLIC_EMBED_LIB_URL='http://localhost:3000/embed/embed.js'

# To enable SAML login, set both these variables
# @see https://github.com/calcom/cal.com/tree/main/packages/features/ee#setting-up-saml-login
# @see https://github.com/calcom/cal.com/tree/main/packages/ee#setting-up-saml-login
# SAML_DATABASE_URL="postgresql://postgres:@localhost:5450/cal-saml"
SAML_DATABASE_URL=
# SAML_ADMINS='pro@example.com'
SAML_ADMINS=
# NEXT_PUBLIC_HOSTED_CAL_FEATURES=1
NEXT_PUBLIC_HOSTED_CAL_FEATURES=
# For additional security set to a random secret and use that value as the client_secret during the OAuth 2.0 flow.
SAML_CLIENT_SECRET_VERIFIER=

# If you use Heroku to deploy Postgres (or use self-signed certs for Postgres) then uncomment the following line.
# @see https://devcenter.heroku.com/articles/connecting-heroku-postgres#connecting-in-node-js
# PGSSLMODE='no-verify'
PGSSLMODE=

# Define which hostnames are expected for the app to work on
ALLOWED_HOSTNAMES='"cal.com","cal.dev","cal-staging.com","cal.community","cal.local:3000","localhost:3000"'
# Reserved orgs subdomains for our own usage
RESERVED_SUBDOMAINS='"app","auth","docs","design","console","go","status","api","saml","www","matrix","developer","cal","my","team","support","security","blog","learn","admin"'
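Note that `ALLOWED_HOSTNAMES` and `RESERVED_SUBDOMAINS` are comma-separated lists of quoted strings rather than plain values. A minimal sketch of parsing that format, assuming the entries are consumed as JSON string literals (illustrative only, not the app's actual parser):

```ts
// Illustrative: '"cal.com","cal.dev","localhost:3000"' -> ["cal.com", "cal.dev", "localhost:3000"]
function parseQuotedList(value: string | undefined): string[] {
  if (!value) return [];
  return JSON.parse(`[${value}]`) as string[];
}

const allowedHostnames = parseQuotedList(process.env.ALLOWED_HOSTNAMES);
const reservedSubdomains = parseQuotedList(process.env.RESERVED_SUBDOMAINS);
console.log(allowedHostnames.length, reservedSubdomains.length);
```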
# - NEXTAUTH
# @see: https://github.com/calendso/calendso/issues/263
# @see: https://next-auth.js.org/configuration/options#nextauth_url
# Required for Vercel hosting - set NEXTAUTH_URL to equal your NEXT_PUBLIC_WEBAPP_URL
NEXTAUTH_URL='http://localhost:3000'
# NEXTAUTH_URL='http://localhost:3000'
NEXTAUTH_URL=
# @see: https://next-auth.js.org/configuration/options#nextauth_secret
# You can use: `openssl rand -base64 32` to generate one
NEXTAUTH_SECRET=
# Used for cross-domain cookie authentication
NEXTAUTH_COOKIE_DOMAIN=
NEXTAUTH_COOKIE_DOMAIN=.example.com

# Set this to '1' if you don't want Cal to collect anonymous usage
CALCOM_TELEMETRY_DISABLED=
# Remove this var if you don't want Cal to collect anonymous usage
NEXT_PUBLIC_TELEMETRY_KEY=js.2pvs2bbpqq1zxna97wcml.oi2jzirnbj1ev4tc57c5r

# ApiKey for cronjobs
CRON_API_KEY='0cc0e6c35519bba620c9360cfe3e68d0'

# Whether to automatically keep app metadata in the database in sync with the metadata/config files. When disabled, the
# sync runs in a reporting-only dry-run mode.
CRON_ENABLE_APP_SYNC=false

# Application Key for symmetric encryption and decryption
# must be 32 bytes for AES256 encryption algorithm
# You can use: `openssl rand -base64 32` to generate one
# You can use: `openssl rand -base64 24` to generate one
CALENDSO_ENCRYPTION_KEY=
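`NEXTAUTH_SECRET` and `CALENDSO_ENCRYPTION_KEY` are both generated with `openssl rand -base64 ...`, as the comments note. If openssl is not handy, a Node/TypeScript equivalent looks roughly like this (the variable names are illustrative):

```ts
import { randomBytes } from "node:crypto";

// Equivalent of `openssl rand -base64 32`: a NEXTAUTH_SECRET candidate.
const nextAuthSecret = randomBytes(32).toString("base64");

// `openssl rand -base64 24` yields a 32-character base64 string, which satisfies the
// 32-byte AES-256 key length when the string itself is used as the key (assumption).
const calendsoEncryptionKey = randomBytes(24).toString("base64");

console.log({ nextAuthSecret, calendsoEncryptionKey });
```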
# Intercom Config
NEXT_PUBLIC_INTERCOM_APP_ID=

# Secret to enable Intercom Identity Verification
INTERCOM_SECRET=

# Zendesk Config
NEXT_PUBLIC_ZENDESK_KEY=

# Help Scout Config
NEXT_PUBLIC_HELPSCOUT_KEY=

# Fresh Chat Config
NEXT_PUBLIC_FRESHCHAT_TOKEN=
NEXT_PUBLIC_FRESHCHAT_HOST=

# Inbox to send user feedback
SEND_FEEDBACK_EMAIL=

# Sendgrid
# Used for email reminders in workflows and internal sync services
SENDGRID_API_KEY=
SENDGRID_EMAIL=
NEXT_PUBLIC_SENDGRID_SENDER_NAME=

# Twilio
# Used to send SMS reminders in workflows
TWILIO_SID=
TWILIO_TOKEN=
TWILIO_MESSAGING_SID=
TWILIO_PHONE_NUMBER=
TWILIO_WHATSAPP_PHONE_NUMBER=
# For NEXT_PUBLIC_SENDER_ID only letters, numbers and spaces are allowed (max. 11 characters)
NEXT_PUBLIC_SENDER_ID=
TWILIO_VERIFY_SID=

# Set it to "1" if you need to run E2E tests locally.
# This is used so we can bypass emails in auth flows for E2E testing
# Set it to "1" if you need to run E2E tests locally
NEXT_PUBLIC_IS_E2E=

# Used for internal billing system
NEXT_PUBLIC_STRIPE_PRO_PLAN_PRODUCT=
NEXT_PUBLIC_STRIPE_PRO_PLAN_PRICE=
NEXT_PUBLIC_STRIPE_PREMIUM_PLAN_PRICE=
NEXT_PUBLIC_IS_PREMIUM_NEW_PLAN=0
NEXT_PUBLIC_STRIPE_PREMIUM_NEW_PLAN_PRICE=
STRIPE_TEAM_MONTHLY_PRICE_ID=
STRIPE_ORG_MONTHLY_PRICE_ID=
STRIPE_WEBHOOK_SECRET=
STRIPE_PRIVATE_KEY=
STRIPE_CLIENT_ID=
PAYMENT_FEE_FIXED=
PAYMENT_FEE_PERCENTAGE=
NEXT_PUBLIC_STRIPE_FREE_PLAN_PRICE=

# Used for internal Public API Keys (optional)
API_KEY_PREFIX=cal_
@@ -153,16 +91,12 @@ API_KEY_PREFIX=cal_
EMAIL_FROM='notifications@yourselfhostedcal.com'

# Configure SMTP settings (@see https://nodemailer.com/smtp/).
# Configuration to receive emails locally (mailhog)
EMAIL_SERVER_HOST='localhost'
EMAIL_SERVER_PORT=1025

# Note: The below configuration for Office 365 has been verified to work.
# EMAIL_SERVER_HOST='smtp.office365.com'
# EMAIL_SERVER_PORT=587
# EMAIL_SERVER_USER='<office365_emailAddress>'
EMAIL_SERVER_HOST='smtp.office365.com'
EMAIL_SERVER_PORT=587
EMAIL_SERVER_USER='<office365_emailAddress>'
# Keep in mind that if you have 2FA enabled, you will need to provision an App Password.
# EMAIL_SERVER_PASSWORD='<office365_password>'
EMAIL_SERVER_PASSWORD='<office365_password>'

# The following configuration for Gmail has been verified to work.
# EMAIL_SERVER_HOST='smtp.gmail.com'
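The `EMAIL_SERVER_*` values above are standard SMTP settings (see the nodemailer link). Roughly, they map onto a transport like this; a sketch only, not the app's mailer code:

```ts
import nodemailer from "nodemailer";

// Sketch: how EMAIL_FROM and the EMAIL_SERVER_* values feed an SMTP transport.
const transport = nodemailer.createTransport({
  host: process.env.EMAIL_SERVER_HOST, // 'localhost' for mailhog, 'smtp.office365.com' for Office 365
  port: Number(process.env.EMAIL_SERVER_PORT ?? 587),
  auth: process.env.EMAIL_SERVER_USER
    ? { user: process.env.EMAIL_SERVER_USER, pass: process.env.EMAIL_SERVER_PASSWORD }
    : undefined,
});

await transport.sendMail({
  from: process.env.EMAIL_FROM,
  to: "someone@example.com",
  subject: "SMTP smoke test",
  text: "If this arrives, the EMAIL_SERVER_* settings are usable.",
});
```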
@@ -171,89 +105,4 @@ EMAIL_SERVER_PORT=1025
## You will need to provision an App Password.
## @see https://support.google.com/accounts/answer/185833
# EMAIL_SERVER_PASSWORD='<gmail_app_password>'

# Used for E2E for email testing
# Set it to "1" if you need email checks in E2E tests locally
# Make sure to run mailhog container manually or with `yarn dx`
E2E_TEST_MAILHOG_ENABLED=

# **********************************************************************************************************

# Set the following value to true if you wish to enable Team Impersonation
NEXT_PUBLIC_TEAM_IMPERSONATION=false

# Close.com internal CRM
CLOSECOM_API_KEY=

# Sendgrid internal sync service
SENDGRID_SYNC_API_KEY=

# Change your Brand
NEXT_PUBLIC_APP_NAME="Cal.com"
NEXT_PUBLIC_SUPPORT_MAIL_ADDRESS="help@cal.com"
NEXT_PUBLIC_COMPANY_NAME="Cal.com, Inc."
# Set this to true to disable new signups
# NEXT_PUBLIC_DISABLE_SIGNUP=true
NEXT_PUBLIC_DISABLE_SIGNUP=

# Content Security Policy
CSP_POLICY=

# Vercel Edge Config
EDGE_CONFIG=

NEXT_PUBLIC_MINUTES_TO_BOOK=5 # Minutes

# - ORGANIZATIONS *******************************************************************************************
# Enable Organizations non-prod domain setup, works in combination with organizations feature flag
# This is mainly needed locally, because for orgs to work a full domain name needs to point
# to the app, i.e. app.cal.local instead of using localhost, which is very disruptive
#
# This variable should only be set to 1 or true if you are in a non-prod environment and you want to
# use organizations
ORGANIZATIONS_ENABLED=

# This variable should only be set to 1 or true if you want to autolink external provider sign-ups with
# existing organizations based on email domain address
ORGANIZATIONS_AUTOLINK=

# Vercel Config to create subdomains for organizations
# Get it from https://vercel.com/<TEAM_OR_USER_NAME>/<PROJECT_SLUG>/settings
PROJECT_ID_VERCEL=
# Get it from: https://vercel.com/teams/<TEAM_SLUG>/settings
TEAM_ID_VERCEL=
# Get it from: https://vercel.com/account/tokens
AUTH_BEARER_TOKEN_VERCEL=

# - APPLE CALENDAR
# Used for E2E tests on Apple Calendar
E2E_TEST_APPLE_CALENDAR_EMAIL=""
E2E_TEST_APPLE_CALENDAR_PASSWORD=""

# - APP CREDENTIAL SYNC ***********************************************************************************
# Used for self-hosters that are implementing Cal.com into their applications that already have certain integrations
# Under settings/admin/apps ensure that all app secrets are set the same as the parent application
# You can use: `openssl rand -base64 32` to generate one
CALCOM_WEBHOOK_SECRET=""
# This is the header name that will be used to verify the webhook secret. Should be in lowercase
CALCOM_WEBHOOK_HEADER_NAME="calcom-webhook-secret"
CALCOM_CREDENTIAL_SYNC_ENDPOINT=""
# Key should match on Cal.com and your application
# must be 32 bytes for AES256 encryption algorithm
# You can use: `openssl rand -base64 24` to generate one
CALCOM_APP_CREDENTIAL_ENCRYPTION_KEY=""

# - OIDC E2E TEST *******************************************************************************************

# Ensure this ADMIN EMAIL is present in the SAML_ADMINS list
E2E_TEST_SAML_ADMIN_EMAIL=
E2E_TEST_SAML_ADMIN_PASSWORD=

E2E_TEST_OIDC_CLIENT_ID=
E2E_TEST_OIDC_CLIENT_SECRET=
E2E_TEST_OIDC_PROVIDER_DOMAIN=

E2E_TEST_OIDC_USER_EMAIL=
E2E_TEST_OIDC_USER_PASSWORD=

# ***********************************************************************************************************
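In the APP CREDENTIAL SYNC block above, `CALCOM_WEBHOOK_SECRET` is sent under the header named by `CALCOM_WEBHOOK_HEADER_NAME`, so the receiving application can check that incoming sync calls really come from Cal.com. A minimal sketch of that check on the receiving side (an assumed shape, not the project's actual handler):

```ts
// Illustrative verification of the credential-sync webhook header.
const HEADER_NAME = process.env.CALCOM_WEBHOOK_HEADER_NAME ?? "calcom-webhook-secret";

function isTrustedCalcomWebhook(headers: Record<string, string | string[] | undefined>): boolean {
  const received = headers[HEADER_NAME.toLowerCase()]; // Node lowercases incoming header names
  return typeof received === "string" && received === process.env.CALCOM_WEBHOOK_SECRET;
}
```

In real code a constant-time comparison (for example `crypto.timingSafeEqual`) would be preferable to `===`.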
@@ -1,6 +0,0 @@
node_modules
**/**/node_modules
**/**/.next
**/**/public
packages/prisma/zod
apps/web/public/embed
@@ -20,20 +20,8 @@ A summary of the issue. This needs to be a clear detailed-rich summary.

Any other relevant information. For example, why do you consider this a bug and what did you expect to happen instead?

### Actual Results

- What's happening right now that is different from what is expected

### Expected Results

- The ideal result that the system should produce once the steps above are performed

### Technical details

- Browser version, screen recording, console logs, network requests: You can make a recording with [Bird Eats Bug](https://birdeatsbug.com/).
- Node.js version
- Anything else that you think could be an issue.

### Evidence

- How was this tested? Evidence is essential for bug reports: screenshots and/or videos of your testing are a great way to demonstrate the bug and give troubleshooting a head start.
@@ -1,5 +1,5 @@
blank_issues_enabled: false
contact_links:
  - name: Questions
    url: https://go.cal.com/discord
    about: Ask a general question about the project on our Discord community
    url: https://cal.com/slack
    about: Ask a general question about the project on our Slack workspace
@@ -39,11 +39,3 @@ assignees: ""
-->

(Write your answer here.)

### Requirement/Document

<!--
Is there any type of document that could support that feature?
-->

(Share it here.)
@@ -8,35 +8,21 @@ Fixes # (issue)
Loom Video: https://www.loom.com/
-->

## Requirement/Documentation

<!-- Please provide all documents that are important to understand the reason for this PR. -->

- If there is a requirement document, please, share it here.
- If there is a UI/UX design document, please, share it here.

## Type of change

<!-- Please delete bullets that are not relevant. -->
<!-- Please delete options that are not relevant. -->

- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] Chore (refactoring code, technical debt, workflow improvements)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] This change requires a documentation update

## How should this be tested?

<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration. Write details that help to start the tests -->
<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration -->

- Are there environment variables that should be set?
- What is the minimal test data needed?
- What is the expected happy path (input and output)?
- Any other important info that could help to test that PR

## Mandatory Tasks

- [ ] Make sure you have self-reviewed the code. A decent size PR without self-review might be rejected.
- [ ] Test A
- [ ] Test B

## Checklist
@@ -44,6 +30,7 @@ Fixes # (issue)

- I haven't read the [contributing guide](https://github.com/calcom/cal.com/blob/main/CONTRIBUTING.md)
- My code doesn't follow the style guidelines of this project
- I haven't performed a self-review of my own code and corrected any misspellings
- I haven't commented my code, particularly in hard-to-understand areas
- I haven't checked if my PR needs changes to the documentation
- I haven't checked if my changes generate no new warnings
@ -1,29 +0,0 @@
|
|||
name: Cache production build binaries
|
||||
description: "Cache or restore if necessary"
|
||||
inputs:
|
||||
node_version:
|
||||
required: false
|
||||
default: v18.x
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Cache production build
|
||||
uses: buildjet/cache@v3
|
||||
id: cache-build
|
||||
env:
|
||||
cache-name: prod-build
|
||||
key-1: ${{ inputs.node_version }}-${{ hashFiles('yarn.lock') }}
|
||||
key-2: ${{ hashFiles('apps/**/**.[jt]s', 'apps/**/**.[jt]sx', 'packages/**/**.[jt]s', 'packages/**/**.[jt]sx', '!**/node_modules') }}
|
||||
key-3: ${{ github.event.pull_request.number || github.ref }}
|
||||
# Ensures production-build.yml will always be fresh
|
||||
key-4: ${{ github.sha }}
|
||||
with:
|
||||
path: |
|
||||
${{ github.workspace }}/apps/web/.next
|
||||
${{ github.workspace }}/apps/web/public/embed
|
||||
**/.turbo/**
|
||||
**/dist/**
|
||||
key: ${{ runner.os }}-${{ env.cache-name }}-${{ env.key-1 }}-${{ env.key-2 }}-${{ env.key-3 }}-${{ env.key-4 }}
|
||||
- run: yarn build
|
||||
if: steps.cache-build.outputs.cache-hit != 'true'
|
||||
shell: bash
|
|
@ -1,38 +0,0 @@
|
|||
name: Cache database between jobs
|
||||
description: "Cache or restore if necessary"
|
||||
inputs:
|
||||
DATABASE_URL:
|
||||
required: false
|
||||
default: "postgresql://postgres:@localhost:5432/calendso"
|
||||
path:
|
||||
required: false
|
||||
default: "backups/backup.sql"
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Cache database
|
||||
id: cache-db
|
||||
uses: buildjet/cache@v3
|
||||
env:
|
||||
cache-name: cache-db
|
||||
key-1: ${{ hashFiles('packages/prisma/schema.prisma', 'packages/prisma/migrations/**/**.sql', 'packages/prisma/*.ts') }}
|
||||
key-2: ${{ github.event.pull_request.number || github.ref }}
|
||||
with:
|
||||
path: ${{ inputs.path }}
|
||||
key: ${{ runner.os }}-${{ env.cache-name }}-${{ inputs.path }}-${{ env.key-1 }}-${{ env.key-2 }}
|
||||
- run: yarn db-seed
|
||||
if: steps.cache-db.outputs.cache-hit != 'true'
|
||||
shell: bash
|
||||
- name: Postgres Dump Backup
|
||||
if: steps.cache-db.outputs.cache-hit != 'true'
|
||||
uses: tj-actions/pg-dump@v2.3
|
||||
with:
|
||||
database_url: ${{ inputs.DATABASE_URL }}
|
||||
path: ${{ inputs.path }}
|
||||
options: "-O"
|
||||
- name: Postgres Backup Restore
|
||||
if: steps.cache-db.outputs.cache-hit == 'true'
|
||||
uses: tj-actions/pg-restore@v4.5
|
||||
with:
|
||||
database_url: ${{ inputs.DATABASE_URL }}
|
||||
backup_file: ${{ inputs.path }}
|
|
@ -1,10 +0,0 @@
|
|||
name: Dangerous git Checkout
|
||||
description: "Git Checkout from PR code so we can run checks from forks"
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
fetch-depth: 2
|
|
@ -1,71 +0,0 @@
|
|||
########################################################################################
|
||||
# "yarn install" composite action for yarn 2/3/4+ and "nodeLinker: node-modules" #
|
||||
#--------------------------------------------------------------------------------------#
|
||||
# Cache: #
|
||||
# - Downloaded zip archive (multi-arch, preserved across yarn.lock changes) #
|
||||
# - Yarn install state (discarded on yarn.lock changes) #
|
||||
# References: #
|
||||
# - bench: https://gist.github.com/belgattitude/0ecd26155b47e7be1be6163ecfbb0f0b #
|
||||
# - vs @setup/node: https://github.com/actions/setup-node/issues/325 #
|
||||
########################################################################################
|
||||
|
||||
name: "Yarn install"
|
||||
description: "Run yarn install with node_modules linker and cache enabled"
|
||||
inputs:
|
||||
node_version:
|
||||
required: false
|
||||
default: v18.x
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Use Node ${{ inputs.node_version }}
|
||||
uses: buildjet/setup-node@v3
|
||||
with:
|
||||
node-version: ${{ inputs.node_version }}
|
||||
- name: Expose yarn config as "$GITHUB_OUTPUT"
|
||||
id: yarn-config
|
||||
shell: bash
|
||||
run: |
|
||||
echo "CACHE_FOLDER=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT
|
||||
|
||||
# Yarn rotates the downloaded cache archives, @see https://github.com/actions/setup-node/issues/325
|
||||
# Yarn cache is also reusable between arch and os.
|
||||
- name: Restore yarn cache
|
||||
uses: buildjet/cache@v3
|
||||
id: yarn-download-cache
|
||||
with:
|
||||
path: ${{ steps.yarn-config.outputs.CACHE_FOLDER }}
|
||||
key: yarn-download-cache-${{ hashFiles('yarn.lock') }}
|
||||
restore-keys: |
|
||||
yarn-download-cache-
|
||||
|
||||
# Invalidated on yarn.lock changes
|
||||
- name: Restore node_modules
|
||||
id: yarn-nm-cache
|
||||
uses: buildjet/cache@v3
|
||||
with:
|
||||
path: "**/node_modules/"
|
||||
key: ${{ runner.os }}-yarn-nm-cache-${{ hashFiles('yarn.lock', '.yarnrc.yml') }}
|
||||
|
||||
# Invalidated on yarn.lock changes
|
||||
- name: Restore yarn install state
|
||||
id: yarn-install-state-cache
|
||||
uses: buildjet/cache@v3
|
||||
with:
|
||||
path: .yarn/ci-cache/
|
||||
key: ${{ runner.os }}-yarn-install-state-cache-${{ hashFiles('yarn.lock', '.yarnrc.yml') }}
|
||||
|
||||
- name: Install dependencies
|
||||
shell: bash
|
||||
run: |
|
||||
yarn install --inline-builds
|
||||
yarn prisma generate
|
||||
env:
|
||||
# CI optimizations. Overrides yarnrc.yml options (or their defaults) in the CI action.
|
||||
YARN_ENABLE_IMMUTABLE_INSTALLS: "false" # So it doesn't try to remove our private submodule deps
|
||||
YARN_ENABLE_GLOBAL_CACHE: "false" # Use local cache folder to keep downloaded archives
|
||||
YARN_INSTALL_STATE_PATH: .yarn/ci-cache/install-state.gz # Very small speedup when lock does not change
|
||||
YARN_NM_MODE: "hardlinks-local" # Reduce node_modules size
|
||||
# Other environment variables
|
||||
HUSKY: "0" # By default do not run HUSKY install
|
|
@ -1,19 +0,0 @@
|
|||
name: Install playwright binaries
|
||||
description: "Install playwright, cache and restore if necessary"
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Cache playwright binaries
|
||||
id: playwright-cache
|
||||
uses: buildjet/cache@v2
|
||||
with:
|
||||
path: |
|
||||
~/Library/Caches/ms-playwright
|
||||
~/.cache/ms-playwright
|
||||
${{ github.workspace }}/node_modules/playwright
|
||||
key: cache-playwright-${{ hashFiles('**/yarn.lock') }}
|
||||
restore-keys: cache-playwright-
|
||||
- name: Yarn playwright install
|
||||
shell: bash
|
||||
if: steps.playwright-cache.outputs.cache-hit != 'true'
|
||||
run: yarn playwright install
|
|
@ -1,6 +0,0 @@
|
|||
"❗️ migrations":
|
||||
- packages/prisma/migrations/**/migration.sql
|
||||
|
||||
"❗️ .env changes":
|
||||
- .env.example
|
||||
- .env.appStore.example
|
|
@@ -1,18 +0,0 @@
{
  "problemMatcher": [
    {
      "owner": "tsc-absolute",
      "pattern": [
        {
          "regexp": "(?:^|\\s)([^\\s].*)[\\(:](\\d+)[,:](\\d+)(?:\\):\\s+|\\s+-\\s+)(error|warning|info)\\s+(TS\\d+)\\s*:\\s*(.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "severity": 4,
          "code": 5,
          "message": 6
        }
      ]
    }
  ]
}
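This `tsc-absolute` matcher maps capture groups 1 through 6 of the regexp to file, line, column, severity, code, and message. A quick TypeScript check of how a typical compiler line is split up (the sample path and message are made up):

```ts
// Exercise the problem-matcher regexp against a typical tsc error line.
const matcher =
  /(?:^|\s)([^\s].*)[(:](\d+)[,:](\d+)(?:\):\s+|\s+-\s+)(error|warning|info)\s+(TS\d+)\s*:\s*(.*)$/;

const sample =
  "apps/web/pages/index.tsx(12,5): error TS2322: Type 'string' is not assignable to type 'number'.";
const [, file, line, column, severity, code, message] = sample.match(matcher) ?? [];

console.log({ file, line, column, severity, code, message });
// file: "apps/web/pages/index.tsx", line: "12", column: "5",
// severity: "error", code: "TS2322", message: "Type 'string' is not assignable to type 'number'."
```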
@ -1,74 +0,0 @@
|
|||
name: "Apply issue labels to PR"
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types:
|
||||
- opened
|
||||
|
||||
jobs:
|
||||
label_on_pr:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: none
|
||||
issues: read
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Apply labels from linked issue to PR
|
||||
uses: actions/github-script@v5
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
async function getLinkedIssues(owner, repo, prNumber) {
|
||||
const query = `query GetLinkedIssues($owner: String!, $repo: String!, $prNumber: Int!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
pullRequest(number: $prNumber) {
|
||||
closingIssuesReferences(first: 10) {
|
||||
nodes {
|
||||
number
|
||||
labels(first: 10) {
|
||||
nodes {
|
||||
name
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}`;
|
||||
|
||||
const variables = {
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
prNumber: prNumber,
|
||||
};
|
||||
|
||||
const result = await github.graphql(query, variables);
|
||||
return result.repository.pullRequest.closingIssuesReferences.nodes;
|
||||
}
|
||||
|
||||
const pr = context.payload.pull_request;
|
||||
const linkedIssues = await getLinkedIssues(
|
||||
context.repo.owner,
|
||||
context.repo.repo,
|
||||
pr.number
|
||||
);
|
||||
|
||||
const labelsToAdd = new Set();
|
||||
for (const issue of linkedIssues) {
|
||||
if (issue.labels && issue.labels.nodes) {
|
||||
for (const label of issue.labels.nodes) {
|
||||
labelsToAdd.add(label.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (labelsToAdd.size) {
|
||||
await github.rest.issues.addLabels({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: pr.number,
|
||||
labels: Array.from(labelsToAdd),
|
||||
});
|
||||
}
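The inline `github-script` step above walks `closingIssuesReferences` on the pull request and copies the linked issues' labels onto the PR. The same lookup can be run outside of Actions with `@octokit/graphql`, roughly like this (a standalone sketch; the token and arguments are assumed to be supplied by the caller):

```ts
import { graphql } from "@octokit/graphql";

// Standalone sketch of the closingIssuesReferences lookup used by the workflow script.
const query = `
  query GetLinkedIssues($owner: String!, $repo: String!, $prNumber: Int!) {
    repository(owner: $owner, name: $repo) {
      pullRequest(number: $prNumber) {
        closingIssuesReferences(first: 10) {
          nodes { number labels(first: 10) { nodes { name } } }
        }
      }
    }
  }
`;

export async function linkedIssueLabels(owner: string, repo: string, prNumber: number) {
  const result: any = await graphql(query, {
    owner,
    repo,
    prNumber,
    headers: { authorization: `token ${process.env.GITHUB_TOKEN}` },
  });
  return result.repository.pullRequest.closingIssuesReferences.nodes as Array<{
    number: number;
    labels: { nodes: { name: string }[] };
  }>;
}
```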
@ -1,33 +0,0 @@
|
|||
name: cleanup caches by a branch
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- closed
|
||||
|
||||
jobs:
|
||||
cleanup:
|
||||
runs-on: buildjet-4vcpu-ubuntu-2204
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Cleanup
|
||||
run: |
|
||||
gh extension install actions/gh-actions-cache
|
||||
|
||||
REPO=${{ github.repository }}
|
||||
BRANCH="refs/pull/${{ github.event.pull_request.number }}/merge"
|
||||
|
||||
echo "Fetching list of cache key"
|
||||
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH | cut -f 1 )
|
||||
|
||||
## Setting this to not fail the workflow while deleting cache keys.
|
||||
set +e
|
||||
echo "Deleting caches..."
|
||||
for cacheKey in $cacheKeysForPR
|
||||
do
|
||||
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
|
||||
done
|
||||
echo "Done"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
@ -1,39 +0,0 @@
|
|||
# .github/workflows/chromatic.yml
|
||||
|
||||
# Workflow name
|
||||
name: "Chromatic"
|
||||
|
||||
# Event for the workflow
|
||||
on:
|
||||
pull_request_target: # So we can test on forks
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- apps/storybook/**
|
||||
- packages/ui/**
|
||||
workflow_dispatch:
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
# List of jobs
|
||||
jobs:
|
||||
chromatic-deployment:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }} # So we can test on forks
|
||||
fetch-depth: 0
|
||||
- name: Install dependencies
|
||||
run: yarn
|
||||
# 👇 Adds Chromatic as a step in the workflow
|
||||
- name: Publish to Chromatic
|
||||
uses: chromaui/action@v1
|
||||
# Options required to the GitHub Chromatic Action
|
||||
with:
|
||||
# 👇 Chromatic projectToken, refer to the manage page to obtain it.
|
||||
projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }}
|
||||
workingDir: "apps/storybook"
|
||||
buildScriptName: "build"
|
|
@ -1,19 +1,26 @@
|
|||
name: Check types
|
||||
on:
|
||||
workflow_call:
|
||||
env:
|
||||
NODE_OPTIONS: "--max-old-space-size=8192"
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
jobs:
|
||||
check-types:
|
||||
runs-on: buildjet-4vcpu-ubuntu-2204
|
||||
types:
|
||||
name: Check types
|
||||
strategy:
|
||||
matrix:
|
||||
node: ["14.x"]
|
||||
os: [ubuntu-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./.github/actions/dangerous-git-checkout
|
||||
- uses: ./.github/actions/yarn-install
|
||||
- name: Show info
|
||||
run: node -e "console.log(require('v8').getHeapStatistics())"
|
||||
- name: Configure TSC problem matcher
|
||||
run: |
|
||||
echo "::remove-matcher owner=tsc::"
|
||||
echo "::add-matcher::.github/matchers/tsc-absolute.json"
|
||||
- run: yarn type-check:ci
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Use Node ${{ matrix.node }}
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: ${{ matrix.node }}
|
||||
- run: yarn
|
||||
- run: yarn type-check
|
||||
|
|
|
@ -4,10 +4,10 @@ on:
|
|||
# "Scheduled workflows run on the latest commit on the default or base branch."
|
||||
# — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
|
||||
schedule:
|
||||
# Runs "At every 15th minute." (see https://crontab.guru)
|
||||
- cron: "*/15 * * * *"
|
||||
# Runs “At minute 0, 15, 30, and 45.” (see https://crontab.guru)
|
||||
- cron: "0,15,30,45 * * * *"
|
||||
jobs:
|
||||
cron-bookingReminder:
|
||||
cron:
|
||||
env:
|
||||
APP_URL: ${{ secrets.APP_URL }}
|
||||
CRON_API_KEY: ${{ secrets.CRON_API_KEY }}
|
||||
|
@ -20,4 +20,4 @@ jobs:
|
|||
-X POST \
|
||||
-H 'content-type: application/json' \
|
||||
-H 'authorization: ${{ secrets.CRON_API_KEY }}' \
|
||||
-sSf
|
||||
--fail
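All of these cron workflows do the same thing: POST to an API route with `CRON_API_KEY` in the `authorization` header, relying on curl's `-sSf`/`--fail` so the job exits non-zero on an HTTP error. A rough TypeScript equivalent of one such step (the `/api/cron/bookingReminder` path is inferred from the job name, not shown in this hunk):

```ts
// Rough fetch equivalent of the cron cURL step above.
const res = await fetch(`${process.env.APP_URL}/api/cron/bookingReminder`, {
  method: "POST",
  headers: {
    "content-type": "application/json",
    authorization: process.env.CRON_API_KEY ?? "",
  },
});

// Mirror curl's --fail: any non-2xx response should fail the job.
if (!res.ok) {
  throw new Error(`Cron request failed: ${res.status} ${res.statusText}`);
}
```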
@ -1,14 +1,13 @@
|
|||
name: Cron - downgradeUsers
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
# "Scheduled workflows run on the latest commit on the default or base branch."
|
||||
# — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
|
||||
schedule:
|
||||
# Runs "At 00:00 on day-of-month 1." (see https://crontab.guru)
|
||||
- cron: "0 0 1 * *"
|
||||
# Runs “At minute 0, 15, 30, and 45.” (see https://crontab.guru)
|
||||
- cron: "0,15,30,45 * * * *"
|
||||
jobs:
|
||||
cron-downgradeUsers:
|
||||
cron:
|
||||
env:
|
||||
APP_URL: ${{ secrets.APP_URL }}
|
||||
CRON_API_KEY: ${{ secrets.CRON_API_KEY }}
|
||||
|
@ -21,4 +20,4 @@ jobs:
|
|||
-X POST \
|
||||
-H 'content-type: application/json' \
|
||||
-H 'authorization: ${{ secrets.CRON_API_KEY }}' \
|
||||
-sSf
|
||||
--fail
|
||||
|
|
|
@ -1,33 +0,0 @@
|
|||
name: Cron - monthlyDigestEmail
|
||||
|
||||
on:
|
||||
# "Scheduled workflows run on the latest commit on the default or base branch."
|
||||
# — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
|
||||
schedule:
|
||||
# Runs on the 28th, 29th, 30th and 31st of every month (see https://crontab.guru)
|
||||
- cron: "59 23 28-31 * *"
|
||||
jobs:
|
||||
cron-monthlyDigestEmail:
|
||||
env:
|
||||
APP_URL: ${{ secrets.APP_URL }}
|
||||
CRON_API_KEY: ${{ secrets.CRON_API_KEY }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check if today is the last day of the month
|
||||
id: check-last-day
|
||||
run: |
|
||||
LAST_DAY=$(date -d tomorrow +%d)
|
||||
if [ "$LAST_DAY" == "01" ]; then
|
||||
echo "::set-output name=is_last_day::true"
|
||||
else
|
||||
echo "::set-output name=is_last_day::false"
|
||||
fi
|
||||
|
||||
- name: cURL request
|
||||
if: ${{ env.APP_URL && env.CRON_API_KEY && steps.check-last-day.outputs.is_last_day == 'true' }}
|
||||
run: |
|
||||
curl ${{ secrets.APP_URL }}/api/cron/monthlyDigestEmail \
|
||||
-X POST \
|
||||
-H 'content-type: application/json' \
|
||||
-H 'authorization: ${{ secrets.CRON_API_KEY }}' \
|
||||
--fail
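The `check-last-day` step above only lets the cURL request run when `date -d tomorrow +%d` is `01`, that is, when today is the last day of the month (the schedule itself fires on the 28th through the 31st). The same check in TypeScript, for reference:

```ts
// True only on the last day of the current month (same idea as `date -d tomorrow +%d` == "01").
function isLastDayOfMonth(today: Date = new Date()): boolean {
  const tomorrow = new Date(today);
  tomorrow.setDate(today.getDate() + 1);
  return tomorrow.getDate() === 1;
}

console.log(isLastDayOfMonth()); // matches the workflow's is_last_day output
```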
@ -1,23 +0,0 @@
|
|||
name: Cron - scheduleEmailReminders
|
||||
|
||||
on:
|
||||
# "Scheduled workflows run on the latest commit on the default or base branch."
|
||||
# — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
|
||||
schedule:
|
||||
# Runs "At every 15th minute." (see https://crontab.guru)
|
||||
- cron: "*/15 * * * *"
|
||||
jobs:
|
||||
cron-scheduleEmailReminders:
|
||||
env:
|
||||
APP_URL: ${{ secrets.APP_URL }}
|
||||
CRON_API_KEY: ${{ secrets.CRON_API_KEY }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: cURL request
|
||||
if: ${{ env.APP_URL && env.CRON_API_KEY }}
|
||||
run: |
|
||||
curl ${{ secrets.APP_URL }}/api/cron/workflows/scheduleEmailReminders \
|
||||
-X POST \
|
||||
-H 'content-type: application/json' \
|
||||
-H 'authorization: ${{ secrets.CRON_API_KEY }}' \
|
||||
-sSf
|
|
@ -1,23 +0,0 @@
|
|||
name: Cron - scheduleSMSReminders
|
||||
|
||||
on:
|
||||
# "Scheduled workflows run on the latest commit on the default or base branch."
|
||||
# — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
|
||||
schedule:
|
||||
# Runs "At every 15th minute." (see https://crontab.guru)
|
||||
- cron: "*/15 * * * *"
|
||||
jobs:
|
||||
cron-scheduleSMSReminders:
|
||||
env:
|
||||
APP_URL: ${{ secrets.APP_URL }}
|
||||
CRON_API_KEY: ${{ secrets.CRON_API_KEY }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: cURL request
|
||||
if: ${{ env.APP_URL && env.CRON_API_KEY }}
|
||||
run: |
|
||||
curl ${{ secrets.APP_URL }}/api/cron/workflows/scheduleSMSReminders \
|
||||
-X POST \
|
||||
-H 'content-type: application/json' \
|
||||
-H 'authorization: ${{ secrets.CRON_API_KEY }}' \
|
||||
-sSf
|
|
@ -1,23 +0,0 @@
|
|||
name: Cron - scheduleWhatsappReminders
|
||||
|
||||
on:
|
||||
# "Scheduled workflows run on the latest commit on the default or base branch."
|
||||
# — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
|
||||
schedule:
|
||||
# Runs "At every 15th minute." (see https://crontab.guru)
|
||||
- cron: "*/15 * * * *"
|
||||
jobs:
|
||||
cron-scheduleWhatsappReminders:
|
||||
env:
|
||||
APP_URL: ${{ secrets.APP_URL }}
|
||||
CRON_API_KEY: ${{ secrets.CRON_API_KEY }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: cURL request
|
||||
if: ${{ env.APP_URL && env.CRON_API_KEY }}
|
||||
run: |
|
||||
curl ${{ secrets.APP_URL }}/api/cron/workflows/scheduleWhatsappReminders \
|
||||
-X POST \
|
||||
-H 'content-type: application/json' \
|
||||
-H 'authorization: ${{ secrets.CRON_API_KEY }}' \
|
||||
-sSf
|
|
@ -1,26 +0,0 @@
|
|||
name: Cron - mark stale for inactive issues
|
||||
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
|
||||
on:
|
||||
# "Scheduled workflows run on the latest commit on the default or base branch."
|
||||
# — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
|
||||
schedule:
|
||||
# Runs "At 00:00." every day (see https://crontab.guru)
|
||||
- cron: "0 0 * * *"
|
||||
workflow_dispatch:
|
||||
jobs:
|
||||
stale:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/stale@v7
|
||||
with:
|
||||
days-before-issue-stale: 60
|
||||
days-before-issue-close: -1
|
||||
days-before-pr-stale: 14
|
||||
days-before-pr-close: 7
|
||||
stale-pr-message: "This PR is being marked as stale due to inactivity."
|
||||
close-pr-message: "This PR is being closed due to inactivity. Please reopen if work is intended to be continued."
|
||||
operations-per-run: 100
|
|
@ -1,24 +0,0 @@
|
|||
name: Cron - syncAppMeta
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
# "Scheduled workflows run on the latest commit on the default or base branch."
|
||||
# — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
|
||||
schedule:
|
||||
# Runs "At 00:00 on day-of-month 1." (see https://crontab.guru)
|
||||
- cron: "0 0 1 * *"
|
||||
jobs:
|
||||
cron-syncAppMeta:
|
||||
env:
|
||||
APP_URL: ${{ secrets.APP_URL }}
|
||||
CRON_API_KEY: ${{ secrets.CRON_API_KEY }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: cURL request
|
||||
if: ${{ env.APP_URL && env.CRON_API_KEY }}
|
||||
run: |
|
||||
curl ${{ secrets.APP_URL }}/api/cron/syncAppMeta \
|
||||
-X POST \
|
||||
-H 'content-type: application/json' \
|
||||
-H 'authorization: ${{ secrets.CRON_API_KEY }}' \
|
||||
-sSf
|
|
@ -1,23 +0,0 @@
|
|||
name: Cron - webhookTriggers
|
||||
|
||||
on:
|
||||
# "Scheduled workflows run on the latest commit on the default or base branch."
|
||||
# — https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule
|
||||
schedule:
|
||||
# Runs “every 5 minutes” (see https://crontab.guru)
|
||||
- cron: "*/5 * * * *"
|
||||
jobs:
|
||||
cron-webhookTriggers:
|
||||
env:
|
||||
APP_URL: ${{ secrets.APP_URL }}
|
||||
CRON_API_KEY: ${{ secrets.CRON_API_KEY }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: cURL request
|
||||
if: ${{ env.APP_URL && env.CRON_API_KEY }}
|
||||
run: |
|
||||
curl ${{ secrets.APP_URL }}/api/cron/webhookTriggers \
|
||||
-X POST \
|
||||
-H 'content-type: application/json' \
|
||||
-H 'authorization: ${{ secrets.CRON_API_KEY }}' \
|
||||
--fail
|
|
@ -4,39 +4,22 @@ on:
|
|||
push:
|
||||
branches:
|
||||
- main
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
synchronize-with-crowdin:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
token: ${{ secrets.GH_ACCESS_TOKEN }}
|
||||
|
||||
- name: crowdin action
|
||||
uses: crowdin/github-action@v1.13.0
|
||||
with:
|
||||
# upload sources
|
||||
upload_sources: true
|
||||
|
||||
# upload translations (& auto-approve 'em)
|
||||
upload_translations_args: '--auto-approve-imported'
|
||||
upload_translations: true
|
||||
push_translations: true
|
||||
|
||||
# download translations
|
||||
download_translations: true
|
||||
|
||||
# GH config
|
||||
commit_message: "New Crowdin translations by Github Action"
|
||||
localization_branch_name: main
|
||||
create_pull_request: false
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GH_ACCESS_TOKEN }}
|
||||
CROWDIN_PROJECT_ID: ${{ secrets.CROWDIN_PROJECT_ID }}
|
||||
CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: crowdin action
|
||||
uses: crowdin/github-action@1.4.2
|
||||
with:
|
||||
upload_translations: true
|
||||
download_translations: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CROWDIN_PROJECT_ID: ${{ secrets.CROWDIN_PROJECT_ID }}
|
||||
CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}
|
||||
|
|
|
@ -1,71 +0,0 @@
|
|||
name: E2E App-Store Apps
|
||||
on:
|
||||
workflow_call:
|
||||
|
||||
jobs:
|
||||
e2e-app-store:
|
||||
timeout-minutes: 20
|
||||
name: E2E App Store (${{ matrix.shard }}/${{ strategy.job-total }})
|
||||
runs-on: buildjet-4vcpu-ubuntu-2204
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:12.1
|
||||
env:
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_DB: calendso
|
||||
ports:
|
||||
- 5432:5432
|
||||
mailhog:
|
||||
image: mailhog/mailhog:v1.0.1
|
||||
ports:
|
||||
- 8025:8025
|
||||
- 1025:1025
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
## There aren't many tests for AppStore. So, just start with 2 shards. Increase if needed.
|
||||
shard: [1, 2]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./.github/actions/dangerous-git-checkout
|
||||
- run: echo 'NODE_OPTIONS="--max_old_space_size=4096"' >> $GITHUB_ENV
|
||||
- uses: ./.github/actions/yarn-install
|
||||
- uses: ./.github/actions/yarn-playwright-install
|
||||
- uses: ./.github/actions/cache-db
|
||||
- uses: ./.github/actions/cache-build
|
||||
- name: Run Tests
|
||||
run: yarn e2e:app-store --shard=${{ matrix.shard }}/${{ strategy.job-total }}
|
||||
env:
|
||||
ALLOWED_HOSTNAMES: ${{ vars.CI_ALLOWED_HOSTNAMES }}
|
||||
CALENDSO_ENCRYPTION_KEY: ${{ secrets.CI_CALENDSO_ENCRYPTION_KEY }}
|
||||
DATABASE_URL: ${{ secrets.CI_DATABASE_URL }}
|
||||
DEPLOYSENTINEL_API_KEY: ${{ secrets.DEPLOYSENTINEL_API_KEY }}
|
||||
E2E_TEST_APPLE_CALENDAR_EMAIL: ${{ secrets.E2E_TEST_APPLE_CALENDAR_EMAIL }}
|
||||
E2E_TEST_APPLE_CALENDAR_PASSWORD: ${{ secrets.E2E_TEST_APPLE_CALENDAR_PASSWORD }}
|
||||
E2E_TEST_MAILHOG_ENABLED: ${{ vars.E2E_TEST_MAILHOG_ENABLED }}
|
||||
GOOGLE_API_CREDENTIALS: ${{ secrets.CI_GOOGLE_API_CREDENTIALS }}
|
||||
GOOGLE_LOGIN_ENABLED: ${{ vars.CI_GOOGLE_LOGIN_ENABLED }}
|
||||
NEXTAUTH_SECRET: ${{ secrets.CI_NEXTAUTH_SECRET }}
|
||||
NEXTAUTH_URL: ${{ secrets.CI_NEXTAUTH_URL }}
|
||||
NEXT_PUBLIC_IS_E2E: ${{ vars.CI_NEXT_PUBLIC_IS_E2E }}
|
||||
NEXT_PUBLIC_STRIPE_PUBLIC_KEY: ${{ secrets.CI_NEXT_PUBLIC_STRIPE_PUBLIC_KEY }}
|
||||
NEXT_PUBLIC_WEBAPP_URL: ${{ vars.CI_NEXT_PUBLIC_WEBAPP_URL }}
|
||||
NEXT_PUBLIC_WEBSITE_URL: ${{ vars.CI_NEXT_PUBLIC_WEBSITE_URL }}
|
||||
PAYMENT_FEE_FIXED: ${{ vars.CI_PAYMENT_FEE_FIXED }}
|
||||
PAYMENT_FEE_PERCENTAGE: ${{ vars.CI_PAYMENT_FEE_PERCENTAGE }}
|
||||
SAML_ADMINS: ${{ secrets.CI_SAML_ADMINS }}
|
||||
SAML_DATABASE_URL: ${{ secrets.CI_SAML_DATABASE_URL }}
|
||||
STRIPE_PRIVATE_KEY: ${{ secrets.CI_STRIPE_PRIVATE_KEY }}
|
||||
STRIPE_CLIENT_ID: ${{ secrets.CI_STRIPE_CLIENT_ID }}
|
||||
STRIPE_WEBHOOK_SECRET: ${{ secrets.CI_STRIPE_WEBHOOK_SECRET }}
|
||||
SENDGRID_API_KEY: ${{ secrets.CI_SENDGRID_API_KEY }}
|
||||
SENDGRID_EMAIL: ${{ secrets.CI_SENDGRID_EMAIL }}
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
- name: Upload Test Results
|
||||
if: ${{ always() }}
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: app-store-results-${{ matrix.shard }}_${{ strategy.job-total }}
|
||||
path: test-results
|
|
@ -1,67 +0,0 @@
|
|||
name: E2E Embed React tests and booking flow(for non-embed as well)
|
||||
on:
|
||||
workflow_call:
|
||||
|
||||
jobs:
|
||||
e2e-embed:
|
||||
timeout-minutes: 20
|
||||
name: E2E Embed React (${{ matrix.shard }}/${{ strategy.job-total }})
|
||||
runs-on: buildjet-4vcpu-ubuntu-2204
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:12.1
|
||||
env:
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_DB: calendso
|
||||
ports:
|
||||
- 5432:5432
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
## There aren't many tests for embed-react. So, just start with 2 shards. Increase if needed.
|
||||
shard: [1, 2]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./.github/actions/dangerous-git-checkout
|
||||
- run: echo 'NODE_OPTIONS="--max_old_space_size=4096"' >> $GITHUB_ENV
|
||||
- uses: ./.github/actions/yarn-install
|
||||
- uses: ./.github/actions/yarn-playwright-install
|
||||
- uses: ./.github/actions/cache-db
|
||||
- uses: ./.github/actions/cache-build
|
||||
- name: Run Tests
|
||||
run: |
|
||||
yarn e2e:embed-react --shard=${{ matrix.shard }}/${{ strategy.job-total }}
|
||||
yarn workspace @calcom/embed-react packaged:tests
|
||||
env:
|
||||
ALLOWED_HOSTNAMES: ${{ vars.CI_ALLOWED_HOSTNAMES }}
|
||||
CALENDSO_ENCRYPTION_KEY: ${{ secrets.CI_CALENDSO_ENCRYPTION_KEY }}
|
||||
DATABASE_URL: ${{ secrets.CI_DATABASE_URL }}
|
||||
DEPLOYSENTINEL_API_KEY: ${{ secrets.DEPLOYSENTINEL_API_KEY }}
|
||||
E2E_TEST_APPLE_CALENDAR_EMAIL: ${{ secrets.E2E_TEST_APPLE_CALENDAR_EMAIL }}
|
||||
E2E_TEST_APPLE_CALENDAR_PASSWORD: ${{ secrets.E2E_TEST_APPLE_CALENDAR_PASSWORD }}
|
||||
GOOGLE_API_CREDENTIALS: ${{ secrets.CI_GOOGLE_API_CREDENTIALS }}
|
||||
GOOGLE_LOGIN_ENABLED: ${{ vars.CI_GOOGLE_LOGIN_ENABLED }}
|
||||
NEXTAUTH_SECRET: ${{ secrets.CI_NEXTAUTH_SECRET }}
|
||||
NEXTAUTH_URL: ${{ secrets.CI_NEXTAUTH_URL }}
|
||||
NEXT_PUBLIC_IS_E2E: ${{ vars.CI_NEXT_PUBLIC_IS_E2E }}
|
||||
NEXT_PUBLIC_STRIPE_PUBLIC_KEY: ${{ secrets.CI_NEXT_PUBLIC_STRIPE_PUBLIC_KEY }}
|
||||
NEXT_PUBLIC_WEBAPP_URL: ${{ vars.CI_NEXT_PUBLIC_WEBAPP_URL }}
|
||||
NEXT_PUBLIC_WEBSITE_URL: ${{ vars.CI_NEXT_PUBLIC_WEBSITE_URL }}
|
||||
PAYMENT_FEE_FIXED: ${{ vars.CI_PAYMENT_FEE_FIXED }}
|
||||
PAYMENT_FEE_PERCENTAGE: ${{ vars.CI_PAYMENT_FEE_PERCENTAGE }}
|
||||
SAML_ADMINS: ${{ secrets.CI_SAML_ADMINS }}
|
||||
SAML_DATABASE_URL: ${{ secrets.CI_SAML_DATABASE_URL }}
|
||||
STRIPE_PRIVATE_KEY: ${{ secrets.CI_STRIPE_PRIVATE_KEY }}
|
||||
STRIPE_CLIENT_ID: ${{ secrets.CI_STRIPE_CLIENT_ID }}
|
||||
STRIPE_WEBHOOK_SECRET: ${{ secrets.CI_STRIPE_WEBHOOK_SECRET }}
|
||||
SENDGRID_API_KEY: ${{ secrets.CI_SENDGRID_API_KEY }}
|
||||
SENDGRID_EMAIL: ${{ secrets.CI_SENDGRID_EMAIL }}
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
- name: Upload Test Results
|
||||
if: ${{ always() }}
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: embed-react-results-${{ matrix.shard }}_${{ strategy.job-total }}
|
||||
path: test-results
|
|
@ -1,71 +0,0 @@
|
|||
name: E2E Embed Core tests and booking flow(for non-embed as well)
|
||||
on:
|
||||
workflow_call:
|
||||
|
||||
jobs:
|
||||
e2e-embed:
|
||||
timeout-minutes: 20
|
||||
name: E2E Embed Core (${{ matrix.shard }}/${{ strategy.job-total }})
|
||||
runs-on: buildjet-4vcpu-ubuntu-2204
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:12.1
|
||||
env:
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_DB: calendso
|
||||
ports:
|
||||
- 5432:5432
|
||||
mailhog:
|
||||
image: mailhog/mailhog:v1.0.1
|
||||
ports:
|
||||
- 8025:8025
|
||||
- 1025:1025
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
## There aren't many tests for embed-core. So, just start with 2 shards. Increase if needed.
|
||||
shard: [1, 2]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./.github/actions/dangerous-git-checkout
|
||||
- run: echo 'NODE_OPTIONS="--max_old_space_size=4096"' >> $GITHUB_ENV
|
||||
- uses: ./.github/actions/yarn-install
|
||||
- uses: ./.github/actions/yarn-playwright-install
|
||||
- uses: ./.github/actions/cache-db
|
||||
- uses: ./.github/actions/cache-build
|
||||
- name: Run Tests
|
||||
run: yarn e2e:embed --shard=${{ matrix.shard }}/${{ strategy.job-total }}
|
||||
env:
|
||||
ALLOWED_HOSTNAMES: ${{ vars.CI_ALLOWED_HOSTNAMES }}
|
||||
CALENDSO_ENCRYPTION_KEY: ${{ secrets.CI_CALENDSO_ENCRYPTION_KEY }}
|
||||
DATABASE_URL: ${{ secrets.CI_DATABASE_URL }}
|
||||
DEPLOYSENTINEL_API_KEY: ${{ secrets.DEPLOYSENTINEL_API_KEY }}
|
||||
E2E_TEST_APPLE_CALENDAR_EMAIL: ${{ secrets.E2E_TEST_APPLE_CALENDAR_EMAIL }}
|
||||
E2E_TEST_APPLE_CALENDAR_PASSWORD: ${{ secrets.E2E_TEST_APPLE_CALENDAR_PASSWORD }}
|
||||
E2E_TEST_MAILHOG_ENABLED: ${{ vars.E2E_TEST_MAILHOG_ENABLED }}
|
||||
GOOGLE_API_CREDENTIALS: ${{ secrets.CI_GOOGLE_API_CREDENTIALS }}
|
||||
GOOGLE_LOGIN_ENABLED: ${{ vars.CI_GOOGLE_LOGIN_ENABLED }}
|
||||
NEXTAUTH_SECRET: ${{ secrets.CI_NEXTAUTH_SECRET }}
|
||||
NEXTAUTH_URL: ${{ secrets.CI_NEXTAUTH_URL }}
|
||||
NEXT_PUBLIC_IS_E2E: ${{ vars.CI_NEXT_PUBLIC_IS_E2E }}
|
||||
NEXT_PUBLIC_STRIPE_PUBLIC_KEY: ${{ secrets.CI_NEXT_PUBLIC_STRIPE_PUBLIC_KEY }}
|
||||
NEXT_PUBLIC_WEBAPP_URL: ${{ vars.CI_NEXT_PUBLIC_WEBAPP_URL }}
|
||||
NEXT_PUBLIC_WEBSITE_URL: ${{ vars.CI_NEXT_PUBLIC_WEBSITE_URL }}
|
||||
PAYMENT_FEE_FIXED: ${{ vars.CI_PAYMENT_FEE_FIXED }}
|
||||
PAYMENT_FEE_PERCENTAGE: ${{ vars.CI_PAYMENT_FEE_PERCENTAGE }}
|
||||
SAML_ADMINS: ${{ secrets.CI_SAML_ADMINS }}
|
||||
SAML_DATABASE_URL: ${{ secrets.CI_SAML_DATABASE_URL }}
|
||||
STRIPE_PRIVATE_KEY: ${{ secrets.CI_STRIPE_PRIVATE_KEY }}
|
||||
STRIPE_CLIENT_ID: ${{ secrets.CI_STRIPE_CLIENT_ID }}
|
||||
STRIPE_WEBHOOK_SECRET: ${{ secrets.CI_STRIPE_WEBHOOK_SECRET }}
|
||||
SENDGRID_API_KEY: ${{ secrets.CI_SENDGRID_API_KEY }}
|
||||
SENDGRID_EMAIL: ${{ secrets.CI_SENDGRID_EMAIL }}
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
- name: Upload Test Results
|
||||
if: ${{ always() }}
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: embed-core-results-${{ matrix.shard }}_${{ strategy.job-total }}
|
||||
path: test-results
|
|
@ -1,13 +1,48 @@
|
|||
name: E2E test
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
|
||||
pull_request_target: # So we can test on forks
|
||||
branches:
|
||||
- main
|
||||
paths-ignore:
|
||||
- public/static/locales/**
|
||||
jobs:
|
||||
e2e:
|
||||
test:
|
||||
timeout-minutes: 20
|
||||
name: E2E tests (${{ matrix.shard }}/${{ strategy.job-total }})
|
||||
runs-on: buildjet-4vcpu-ubuntu-2204
|
||||
name: Testing ${{ matrix.node }} and ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
node: ["14.x"]
|
||||
os: [ubuntu-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
env:
|
||||
DATABASE_URL: postgresql://postgres:@localhost:5432/calendso
|
||||
NEXT_PUBLIC_WEBAPP_URL: http://localhost:3000
|
||||
NEXT_PUBLIC_WEBSITE_URL: http://localhost:3000
|
||||
NEXTAUTH_SECRET: secret
|
||||
GOOGLE_API_CREDENTIALS: ${{ secrets.CI_GOOGLE_API_CREDENTIALS }}
|
||||
GOOGLE_LOGIN_ENABLED: true
|
||||
# CRON_API_KEY: xxx
|
||||
CALENDSO_ENCRYPTION_KEY: ${{ secrets.CI_CALENDSO_ENCRYPTION_KEY }}
|
||||
NEXT_PUBLIC_STRIPE_PUBLIC_KEY: ${{ secrets.CI_NEXT_PUBLIC_STRIPE_PUBLIC_KEY }}
STRIPE_PRIVATE_KEY: ${{ secrets.CI_STRIPE_PRIVATE_KEY }}
STRIPE_CLIENT_ID: ${{ secrets.CI_STRIPE_CLIENT_ID }}
STRIPE_WEBHOOK_SECRET: ${{ secrets.CI_STRIPE_WEBHOOK_SECRET }}
PAYMENT_FEE_PERCENTAGE: 0.005
PAYMENT_FEE_FIXED: 10
SAML_DATABASE_URL: postgresql://postgres:@localhost:5432/calendso
SAML_ADMINS: pro@example.com
NEXTAUTH_URL: http://localhost:3000/api/auth
NEXT_PUBLIC_IS_E2E: 1
# EMAIL_FROM: e2e@cal.com
# EMAIL_SERVER_HOST: ${{ secrets.CI_EMAIL_SERVER_HOST }}
# EMAIL_SERVER_PORT: ${{ secrets.CI_EMAIL_SERVER_PORT }}
# EMAIL_SERVER_USER: ${{ secrets.CI_EMAIL_SERVER_USER }}
# EMAIL_SERVER_PASSWORD: ${{ secrets.CI_EMAIL_SERVER_PASSWORD }}
# MS_GRAPH_CLIENT_ID: xxx
# MS_GRAPH_CLIENT_SECRET: xxx
# ZOOM_CLIENT_ID: xxx
# ZOOM_CLIENT_SECRET: xxx
services:
postgres:
image: postgres:12.1
@ -16,59 +51,53 @@ jobs:
POSTGRES_DB: calendso
ports:
- 5432:5432
mailhog:
image: mailhog/mailhog:v1.0.1
ports:
- 8025:8025
- 1025:1025
strategy:
fail-fast: false
matrix:
shard: [1, 2, 3, 4, 5]

steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/dangerous-git-checkout
- run: echo 'NODE_OPTIONS="--max_old_space_size=4096"' >> $GITHUB_ENV
- uses: ./.github/actions/yarn-install
- uses: ./.github/actions/yarn-playwright-install
- uses: ./.github/actions/cache-db
- uses: ./.github/actions/cache-build
- name: Run Tests
run: yarn e2e --shard=${{ matrix.shard }}/${{ strategy.job-total }}
env:
ALLOWED_HOSTNAMES: ${{ vars.CI_ALLOWED_HOSTNAMES }}
CALENDSO_ENCRYPTION_KEY: ${{ secrets.CI_CALENDSO_ENCRYPTION_KEY }}
DATABASE_URL: ${{ secrets.CI_DATABASE_URL }}
DEPLOYSENTINEL_API_KEY: ${{ secrets.DEPLOYSENTINEL_API_KEY }}
E2E_TEST_APPLE_CALENDAR_EMAIL: ${{ secrets.E2E_TEST_APPLE_CALENDAR_EMAIL }}
E2E_TEST_APPLE_CALENDAR_PASSWORD: ${{ secrets.E2E_TEST_APPLE_CALENDAR_PASSWORD }}
E2E_TEST_MAILHOG_ENABLED: ${{ vars.E2E_TEST_MAILHOG_ENABLED }}
GOOGLE_API_CREDENTIALS: ${{ secrets.CI_GOOGLE_API_CREDENTIALS }}
EMAIL_SERVER_HOST: ${{ secrets.CI_EMAIL_SERVER_HOST }}
EMAIL_SERVER_PORT: ${{ secrets.CI_EMAIL_SERVER_PORT }}
EMAIL_SERVER_USER: ${{ secrets.CI_EMAIL_SERVER_USER }}
EMAIL_SERVER_PASSWORD: ${{ secrets.CI_EMAIL_SERVER_PASSWORD}}
GOOGLE_LOGIN_ENABLED: ${{ vars.CI_GOOGLE_LOGIN_ENABLED }}
NEXTAUTH_SECRET: ${{ secrets.CI_NEXTAUTH_SECRET }}
NEXTAUTH_URL: ${{ secrets.CI_NEXTAUTH_URL }}
NEXT_PUBLIC_IS_E2E: ${{ vars.CI_NEXT_PUBLIC_IS_E2E }}
NEXT_PUBLIC_STRIPE_PUBLIC_KEY: ${{ secrets.CI_NEXT_PUBLIC_STRIPE_PUBLIC_KEY }}
NEXT_PUBLIC_WEBAPP_URL: ${{ vars.CI_NEXT_PUBLIC_WEBAPP_URL }}
NEXT_PUBLIC_WEBSITE_URL: ${{ vars.CI_NEXT_PUBLIC_WEBSITE_URL }}
PAYMENT_FEE_FIXED: ${{ vars.CI_PAYMENT_FEE_FIXED }}
PAYMENT_FEE_PERCENTAGE: ${{ vars.CI_PAYMENT_FEE_PERCENTAGE }}
SAML_ADMINS: ${{ secrets.CI_SAML_ADMINS }}
SAML_DATABASE_URL: ${{ secrets.CI_SAML_DATABASE_URL }}
STRIPE_PRIVATE_KEY: ${{ secrets.CI_STRIPE_PRIVATE_KEY }}
STRIPE_CLIENT_ID: ${{ secrets.CI_STRIPE_CLIENT_ID }}
STRIPE_WEBHOOK_SECRET: ${{ secrets.CI_STRIPE_WEBHOOK_SECRET }}
SENDGRID_API_KEY: ${{ secrets.CI_SENDGRID_API_KEY }}
SENDGRID_EMAIL: ${{ secrets.CI_SENDGRID_EMAIL }}
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
- name: Upload Test Results
- name: Checkout repo
uses: actions/checkout@v2
with:
ref: ${{ github.event.pull_request.head.sha }} # So we can test on forks
fetch-depth: 2

- name: Use Node ${{ matrix.node }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node }}
# cache: "yarn"
# cache-dependency-path: yarn.lock

- name: Turbo Cache
id: turbo-cache
uses: actions/cache@v2
with:
path: .turbo
key: turbo-${{ github.job }}-${{ github.event.pull_request.head.ref }}-${{ github.event.pull_request.head.sha }}
restore-keys: |
turbo-${{ github.job }}-${{ github.event.pull_request.head.ref }}-
- run: yarn
- name: Cache playwright binaries
uses: actions/cache@v2
id: playwright-cache
with:
path: |
~/Library/Caches/ms-playwright
~/.cache/ms-playwright
**/node_modules/playwright
key: cache-playwright-${{ hashFiles('**/yarn.lock') }}

- name: Install playwright deps
if: steps.playwright-cache.outputs.cache-hit != 'true'
run: yarn playwright install --with-deps
- run: yarn test-e2e

- name: Upload videos
if: ${{ always() }}
uses: actions/upload-artifact@v2
with:
name: test-results-${{ matrix.shard }}_${{ strategy.job-total }}
path: test-results
name: videos
path: |
test-results
playwright/screenshots
playwright/videos
playwright/results
playwright/reports
@ -1,18 +0,0 @@
name: "Pull Request Labeler"
on:
- pull_request_target
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
labeler:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@v4
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
# https://github.com/actions/labeler/issues/442#issuecomment-1297359481
sync-labels: ""
@ -1,15 +1,35 @@
name: Lint
on:
workflow_call:
pull_request_target:
branches:
- main
jobs:
lint:
runs-on: buildjet-4vcpu-ubuntu-2204
strategy:
matrix:
node: ["14.x"]
os: [ubuntu-latest]
runs-on: ${{ matrix.os }}

steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/dangerous-git-checkout
- uses: ./.github/actions/yarn-install
- name: Run Linting with Reports
- name: Checkout repo
uses: actions/checkout@v2
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 2

- name: Use Node.js 14.x
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node }}
# cache: "yarn"
# cache-dependency-path: yarn.lock

- name: Install deps
if: steps.yarn-cache.outputs.cache-hit != 'true'
run: yarn

- name: Lint
run: yarn lint:report
continue-on-error: true

@ -17,11 +37,10 @@ jobs:
run: jq -s '[.[]]|flatten' lint-results/*.json &> lint-results/eslint_report.json

- name: Annotate Code Linting Results
uses: ataylorme/eslint-annotate-action@v2
uses: ataylorme/eslint-annotate-action@1.2.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
repo-token: "${{ secrets.GITHUB_TOKEN }}"
report-json: "lint-results/eslint_report.json"
only-pr-files: false

- name: Upload ESLint report
if: ${{ always() }}
@ -1,95 +0,0 @@
name: "Next.js Bundle Analysis"

on:
workflow_call:
push:
branches:
- main

jobs:
build:
name: Production build
if: ${{ github.event_name == 'push' }}
uses: ./.github/workflows/production-build.yml
secrets: inherit
analyze:
needs: build
if: always()
runs-on: buildjet-4vcpu-ubuntu-2204
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/dangerous-git-checkout
- uses: ./.github/actions/yarn-install
- uses: ./.github/actions/cache-build
- name: Analyze bundle
run: |
cd apps/web
npx -p nextjs-bundle-analysis@0.5.0 report

- name: Upload bundle
uses: actions/upload-artifact@v2
with:
name: bundle
path: apps/web/.next/analyze/__bundle_analysis.json

- name: Download base branch bundle stats
uses: dawidd6/action-download-artifact@v2
if: success() && github.event.number
with:
workflow: nextjs-bundle-analysis.yml
branch: ${{ github.event.pull_request.base.ref }}
path: apps/web/.next/analyze/base

# And here's the second place - this runs after we have both the current and
# base branch bundle stats, and will compare them to determine what changed.
# There are two configurable arguments that come from package.json:
#
# - budget: optional, set a budget (bytes) against which size changes are measured
# it's set to 350kb here by default, as informed by the following piece:
# https://infrequently.org/2021/03/the-performance-inequality-gap/
#
# - red-status-percentage: sets the percent size increase where you get a red
# status indicator, defaults to 20%
#
# Either of these arguments can be changed or removed by editing the `nextBundleAnalysis`
# entry in your package.json file.
- name: Compare with base branch bundle
if: success() && github.event.number
run: |
cd apps/web
ls -laR .next/analyze/base && npx -p nextjs-bundle-analysis compare

- name: Get comment body
id: get-comment-body
if: success() && github.event.number
run: |
cd apps/web
body=$(cat .next/analyze/__bundle_analysis_comment.txt)
body="${body//'%'/'%25'}"
body="${body//$'\n'/'%0A'}"
body="${body//$'\r'/'%0D'}"
echo ::set-output name=body::$body

- name: Find Comment
uses: peter-evans/find-comment@v1
if: success() && github.event.number
id: fc
with:
issue-number: ${{ github.event.number }}
body-includes: "<!-- __NEXTJS_BUNDLE_@calcom/web -->"

- name: Create Comment
uses: peter-evans/create-or-update-comment@v1.4.4
if: success() && github.event.number && steps.fc.outputs.comment-id == 0
with:
issue-number: ${{ github.event.number }}
body: ${{ steps.get-comment-body.outputs.body }}

- name: Update Comment
uses: peter-evans/create-or-update-comment@v1.4.4
if: success() && github.event.number && steps.fc.outputs.comment-id != 0
with:
issue-number: ${{ github.event.number }}
body: ${{ steps.get-comment-body.outputs.body }}
comment-id: ${{ steps.fc.outputs.comment-id }}
edit-mode: replace
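The comments in the workflow above describe a `nextBundleAnalysis` entry in `apps/web/package.json` without showing it. A minimal illustrative sketch is below; the key names follow the nextjs-bundle-analysis tool's documented defaults rather than anything in this diff, the values are placeholders, and a real package.json cannot contain comments.

```jsonc
// Hypothetical example of the package.json entry the workflow comments refer to.
{
  "nextBundleAnalysis": {
    "budget": 358400,               // ~350 KB budget, in bytes (assumption)
    "budgetPercentIncreaseRed": 20  // red status once the bundle grows by 20% (assumption)
  }
}
```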
@ -1,16 +0,0 @@
name: Assign PR team labels
on:
pull_request:
branches:
- main

jobs:
team-labels:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: equitybee/team-label-action@main
with:
repo-token: ${{ secrets.GH_ACCESS_TOKEN }}
organization-name: calcom
ignore-labels: "app-store, ai, authentication, automated-testing, billing, bookings, caldav, calendar-apps, ci, console, crm-apps, docs, documentation, emails, embeds, event-types, i18n, impersonation, manual-testing, ui, performance, ops-stack, organizations, public-api, routing-forms, seats, teams, webhooks, workflows, zapier"
@ -1,108 +0,0 @@
name: PR Update

on:
pull_request_target:
branches:
- main
paths-ignore:
- "**.md"
- ".github/CODEOWNERS"
merge_group:
workflow_dispatch:

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true

jobs:
changes:
name: Detect changes
runs-on: buildjet-4vcpu-ubuntu-2204
permissions:
pull-requests: read
outputs:
app-store: ${{ steps.filter.outputs.app-store }}
embed: ${{ steps.filter.outputs.embed }}
embed-react: ${{ steps.filter.outputs.embed-react }}
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/dangerous-git-checkout
- uses: dorny/paths-filter@v2
id: filter
with:
filters: |
app-store:
- 'apps/web/**'
- 'packages/app-store/**'
- 'playwright.config.ts'
embed:
- 'apps/web/**'
- 'packages/embeds/**'
- 'playwright.config.ts'
embed-react:
- 'apps/web/**'
- 'packages/embeds/**'
- 'playwright.config.ts'

type-check:
name: Type check
uses: ./.github/workflows/check-types.yml
secrets: inherit

test:
name: Unit tests
uses: ./.github/workflows/test.yml
secrets: inherit

lint:
name: Linters
uses: ./.github/workflows/lint.yml
secrets: inherit

build:
name: Production build
uses: ./.github/workflows/production-build.yml
secrets: inherit

build-without-database:
name: Production build (without database)
uses: ./.github/workflows/production-build-without-database.yml
secrets: inherit

e2e:
name: E2E tests
needs: [changes, lint, build]
uses: ./.github/workflows/e2e.yml
secrets: inherit

e2e-app-store:
name: E2E App Store tests
needs: [changes, lint, build]
uses: ./.github/workflows/e2e-app-store.yml
secrets: inherit

e2e-embed:
name: E2E embeds tests
needs: [changes, lint, build]
uses: ./.github/workflows/e2e-embed.yml
secrets: inherit

e2e-embed-react:
name: E2E React embeds tests
needs: [changes, lint, build]
uses: ./.github/workflows/e2e-embed-react.yml
secrets: inherit

analyze:
needs: build
uses: ./.github/workflows/nextjs-bundle-analysis.yml
secrets: inherit

required:
needs: [lint, type-check, test, build, e2e, e2e-embed, e2e-embed-react, e2e-app-store]
if: always()
runs-on: buildjet-4vcpu-ubuntu-2204
steps:
- name: fail if conditional jobs failed
if: contains(needs.*.result, 'failure') || contains(needs.*.result, 'skipped') || contains(needs.*.result, 'cancelled')
run: exit 1
@ -1,41 +0,0 @@
name: Production Build (without database)

on:
workflow_call:

env:
ALLOWED_HOSTNAMES: ${{ vars.CI_ALLOWED_HOSTNAMES }}
CALENDSO_ENCRYPTION_KEY: ${{ secrets.CI_CALENDSO_ENCRYPTION_KEY }}
DATABASE_URL: ${{ secrets.CI_DATABASE_URL }}
E2E_TEST_APPLE_CALENDAR_EMAIL: ${{ secrets.E2E_TEST_APPLE_CALENDAR_EMAIL }}
E2E_TEST_APPLE_CALENDAR_PASSWORD: ${{ secrets.E2E_TEST_APPLE_CALENDAR_PASSWORD }}
GOOGLE_API_CREDENTIALS: ${{ secrets.CI_GOOGLE_API_CREDENTIALS }}
GOOGLE_LOGIN_ENABLED: ${{ vars.CI_GOOGLE_LOGIN_ENABLED }}
NEXTAUTH_SECRET: ${{ secrets.CI_NEXTAUTH_SECRET }}
NEXTAUTH_URL: ${{ secrets.CI_NEXTAUTH_URL }}
NEXT_PUBLIC_IS_E2E: ${{ vars.CI_NEXT_PUBLIC_IS_E2E }}
NEXT_PUBLIC_STRIPE_PUBLIC_KEY: ${{ secrets.CI_NEXT_PUBLIC_STRIPE_PUBLIC_KEY }}
NEXT_PUBLIC_WEBAPP_URL: ${{ vars.CI_NEXT_PUBLIC_WEBAPP_URL }}
NEXT_PUBLIC_WEBSITE_URL: ${{ vars.CI_NEXT_PUBLIC_WEBSITE_URL }}
PAYMENT_FEE_FIXED: ${{ vars.CI_PAYMENT_FEE_FIXED }}
PAYMENT_FEE_PERCENTAGE: ${{ vars.CI_PAYMENT_FEE_PERCENTAGE }}
SAML_ADMINS: ${{ secrets.CI_SAML_ADMINS }}
SAML_DATABASE_URL: ${{ secrets.CI_SAML_DATABASE_URL }}
STRIPE_PRIVATE_KEY: ${{ secrets.CI_STRIPE_PRIVATE_KEY }}
STRIPE_CLIENT_ID: ${{ secrets.CI_STRIPE_CLIENT_ID }}
STRIPE_WEBHOOK_SECRET: ${{ secrets.CI_STRIPE_WEBHOOK_SECRET }}
SENDGRID_API_KEY: ${{ secrets.CI_SENDGRID_API_KEY }}
SENDGRID_EMAIL: ${{ secrets.CI_SENDGRID_EMAIL }}
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}

jobs:
build:
name: Build
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/dangerous-git-checkout
- uses: ./.github/actions/yarn-install
- uses: ./.github/actions/cache-build
@ -1,50 +0,0 @@
name: Production Build

on:
workflow_call:

env:
ALLOWED_HOSTNAMES: ${{ vars.CI_ALLOWED_HOSTNAMES }}
CALENDSO_ENCRYPTION_KEY: ${{ secrets.CI_CALENDSO_ENCRYPTION_KEY }}
DATABASE_URL: ${{ secrets.CI_DATABASE_URL }}
E2E_TEST_APPLE_CALENDAR_EMAIL: ${{ secrets.E2E_TEST_APPLE_CALENDAR_EMAIL }}
E2E_TEST_APPLE_CALENDAR_PASSWORD: ${{ secrets.E2E_TEST_APPLE_CALENDAR_PASSWORD }}
GOOGLE_API_CREDENTIALS: ${{ secrets.CI_GOOGLE_API_CREDENTIALS }}
GOOGLE_LOGIN_ENABLED: ${{ vars.CI_GOOGLE_LOGIN_ENABLED }}
NEXTAUTH_SECRET: ${{ secrets.CI_NEXTAUTH_SECRET }}
NEXTAUTH_URL: ${{ secrets.CI_NEXTAUTH_URL }}
NEXT_PUBLIC_IS_E2E: ${{ vars.CI_NEXT_PUBLIC_IS_E2E }}
NEXT_PUBLIC_STRIPE_PUBLIC_KEY: ${{ secrets.CI_NEXT_PUBLIC_STRIPE_PUBLIC_KEY }}
NEXT_PUBLIC_WEBAPP_URL: ${{ vars.CI_NEXT_PUBLIC_WEBAPP_URL }}
NEXT_PUBLIC_WEBSITE_URL: ${{ vars.CI_NEXT_PUBLIC_WEBSITE_URL }}
PAYMENT_FEE_FIXED: ${{ vars.CI_PAYMENT_FEE_FIXED }}
PAYMENT_FEE_PERCENTAGE: ${{ vars.CI_PAYMENT_FEE_PERCENTAGE }}
SAML_ADMINS: ${{ secrets.CI_SAML_ADMINS }}
SAML_DATABASE_URL: ${{ secrets.CI_SAML_DATABASE_URL }}
STRIPE_PRIVATE_KEY: ${{ secrets.CI_STRIPE_PRIVATE_KEY }}
STRIPE_CLIENT_ID: ${{ secrets.CI_STRIPE_CLIENT_ID }}
STRIPE_WEBHOOK_SECRET: ${{ secrets.CI_STRIPE_WEBHOOK_SECRET }}
SENDGRID_API_KEY: ${{ secrets.CI_SENDGRID_API_KEY }}
SENDGRID_EMAIL: ${{ secrets.CI_SENDGRID_EMAIL }}
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}

jobs:
build:
name: Build
runs-on: buildjet-4vcpu-ubuntu-2204
timeout-minutes: 30
services:
postgres:
image: postgres:12.1
env:
POSTGRES_USER: postgres
POSTGRES_DB: calendso
ports:
- 5432:5432
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/dangerous-git-checkout
- uses: ./.github/actions/yarn-install
- uses: ./.github/actions/cache-db
- uses: ./.github/actions/cache-build
@ -1,47 +0,0 @@
name: "Release Docker"

on: # yamllint disable-line rule:truthy
release:
types:
- created
# in case manual trigger is needed
workflow_dispatch:
inputs:
RELEASE_TAG:
description: "v{Major}.{Minor}.{Patch}"

jobs:
release:
name: "Remote Release"

runs-on: "ubuntu-latest"

steps:
- name: checkout
uses: actions/checkout@v3

- name: "Determine tag"
run: 'echo "RELEASE_TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV'

- name: "Run remote release workflow"
uses: "actions/github-script@v6"
with:
# Requires a personal access token with Actions Read and write permissions on calcom/docker.
github-token: "${{ secrets.DOCKER_REPO_ACCESS_TOKEN }}"
script: |
try {
const response = await github.rest.actions.createWorkflowDispatch({
owner: context.repo.owner,
repo: 'docker',
workflow_id: 'create-release.yaml',
ref: 'main',
inputs: {
"RELEASE_TAG": process.env.RELEASE_TAG
},
});

console.log(response);
} catch (error) {
console.error(error);
core.setFailed(error.message);
}
@ -1,17 +0,0 @@
on:
workflow_dispatch:
push:
# Pattern matched against refs/tags
tags:
- "*" # Push events to every tag not containing /

jobs:
release:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
ref: main # Always checkout main even for tagged releases
fetch-depth: 0
token: ${{ secrets.GH_ACCESS_TOKEN }}
- run: git push origin +main:production
@ -1,48 +0,0 @@
name: "Validate PRs"

on:
pull_request_target:
types:
- opened
- reopened
- edited
- synchronize

permissions:
pull-requests: write

jobs:
validate-pr:
name: Validate PR title
runs-on: ubuntu-latest
steps:
- uses: amannn/action-semantic-pull-request@v5
id: lint_pr_title
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- uses: marocchino/sticky-pull-request-comment@v2
# When the previous steps fails, the workflow would stop. By adding this
# condition you can continue the execution with the populated error message.
if: always() && (steps.lint_pr_title.outputs.error_message != null)
with:
header: pr-title-lint-error
message: |
Hey there and thank you for opening this pull request! 👋🏼

We require pull request titles to follow the [Conventional Commits specification](https://www.conventionalcommits.org/en/v1.0.0/) and it looks like your proposed title needs to be adjusted.

Details:

```
${{ steps.lint_pr_title.outputs.error_message }}
```

# Delete a previous comment when the issue has been resolved
- if: ${{ steps.lint_pr_title.outputs.error_message == null }}
uses: marocchino/sticky-pull-request-comment@v2
with:
header: pr-title-lint-error
message: |
Thank you for following the naming conventions! 🙏 Feel free to join our [discord](https://go.cal.com/discord) and post your PR link to [collect XP and win prizes!](https://cal.com/blog/community-incentives)
@ -1,21 +1,21 @@
name: Submodule Sync
on:
schedule:
# Runs "At minute 15 past every 4th hour." (see https://crontab.guru)
- cron: "15 */4 * * *"
workflow_dispatch: ~

jobs:
submodule-sync:
name: Submodule update
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: run action
uses: releasehub-com/github-action-create-pr-parent-submodule@v1
with:
submodules: recursive
token: ${{ secrets.GH_ACCESS_TOKEN }}
- name: Commit
run: |
git config user.email "actions@github.com"
git config user.name "github-actions"
git commit -am "Auto updated submodule references" && git push || echo "No changes to commit"
github_token: ${{ secrets.GH_ACCESS_TOKEN }}
parent_repository: "calcom/cal.com"
checkout_branch: "main"
pr_against_branch: "main"
owner: "calcom"
@ -1,19 +0,0 @@
name: Unit tests
on:
workflow_call:
workflow_run:
workflows: [Crowdin Action]
types: [completed]
jobs:
test:
timeout-minutes: 20
runs-on: buildjet-4vcpu-ubuntu-2204
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/dangerous-git-checkout
- run: echo 'NODE_OPTIONS="--max_old_space_size=6144"' >> $GITHUB_ENV
- uses: ./.github/actions/yarn-install
# Should be an 8GB machine as per https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners
- run: yarn test
# We could add different timezones here that we need to run our tests in
- run: TZ=America/Los_Angeles yarn test -- --timeZoneDependentTestsOnly
@ -1,27 +0,0 @@
name: "Welcome new contributors"

on:
issues:
types: opened
pull_request:
types: opened

permissions:
pull-requests: write
issues: write

jobs:
welcome-message:
name: Welcoming New Users
runs-on: ubuntu-latest
timeout-minutes: 10
if: github.event.action == 'opened'
steps:
- uses: actions/first-interaction@v1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
pr-message: |-
Thank you for making your first Pull Request and taking the time to improve Cal.com ! ❤️🎉
Feel free to join our [discord](https://go.cal.com/discord) and post your PR link to [collect XP and win prizes!](https://cal.com/blog/community-incentives)
issue-message: |
Thank you for opening your first issue, one of our team members will review it as soon as it possible. ❤️🎉
@ -1,14 +0,0 @@
name: Yarn install

on:
workflow_call:

jobs:
setup:
name: Yarn install & cache
runs-on: buildjet-4vcpu-ubuntu-2204
timeout-minutes: 10
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/dangerous-git-checkout
- uses: ./.github/actions/yarn-install
@ -2,7 +2,6 @@

# .env file
.env
!packages/prisma/.env

# dependencies
node_modules

@ -65,34 +64,11 @@ tsconfig.tsbuildinfo
# turbo
.turbo

# Prisma generated files
# Prisma-Zod
packages/prisma/zod/*.ts
packages/prisma/enums

# Builds
dist

# Linting
lint-results

#Storybook
apps/storybook/build-storybook.log

# Snaplet
.snaplet/snapshots
.snaplet/structure.d.ts

# Submodules
.gitmodules
apps/website
apps/console
apps/auth

# Yarn Modern
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions
@ -0,0 +1,12 @@
[submodule "apps/admin"]
path = apps/admin
url = https://github.com/calcom/admin.git
branch = main
[submodule "apps/api"]
path = apps/api
url = https://github.com/calcom/api.git
branch = main
[submodule "apps/website"]
path = apps/website
url = https://github.com/calcom/website.git
branch = main
.gitpod.yml
@ -1,45 +0,0 @@
tasks:
- init: |
yarn &&
cp .env.example .env &&
next_auth_secret=$(openssl rand -base64 32) &&
calendso_encryption_key=$(openssl rand -base64 24) &&
sed -i -e "s|^NEXTAUTH_SECRET=.*|NEXTAUTH_SECRET=$next_auth_secret|" \
-e "s|^CALENDSO_ENCRYPTION_KEY=.*|CALENDSO_ENCRYPTION_KEY=$calendso_encryption_key|" \
-e "s|http://localhost:3000|https://localhost:3000|" \
-e "s|localhost:3000|3000-$GITPOD_WORKSPACE_ID.$GITPOD_WORKSPACE_CLUSTER_HOST|" .env
command: yarn dx

ports:
- port: 3000
visibility: public
onOpen: open-preview
- port: 5420
visibility: private
onOpen: ignore
- port: 1025
visibility: private
onOpen: ignore
- port: 8025
visibility: private
onOpen: ignore

github:
prebuilds:
master: true
pullRequests: true
pullRequestsFromForks: true
addCheck: true
addComment: true
addBadge: true

vscode:
extensions:
- DavidAnson.vscode-markdownlint
- yzhang.markdown-all-in-one
- esbenp.prettier-vscode
- dbaeumer.vscode-eslint
- bradlc.vscode-tailwindcss
- ban.spellright
- stripe.vscode-stripe
- Prisma.prisma
@ -0,0 +1 @@
_
@ -2,5 +2,3 @@
. "$(dirname "$0")/_/husky.sh"

yarn lint-staged

yarn app-store:build && git add packages/app-store/*.generated.*

@ -4,4 +4,5 @@ version = 1
autoupdate_label = "♻️ autoupdate"

[approve]
auto_approve_usernames = ["dependabot", "github-actions"]
auto_approve_usernames = ["dependabot"]
@ -14,5 +14,3 @@ public
.DS_Store
.eslintignore
packages/prisma/zod
packages/prisma/enums
apps/web/public/embed
@ -1,4 +0,0 @@
{
"projectId": "cl4u26bwz7962859ply7ibuo43t",
"targetDatabaseUrl": "postgresql://postgres@localhost:5450/calendso"
}
@ -1,3 +0,0 @@
{
"version": "0.22.3"
}
@ -1,97 +0,0 @@
// This transform config was generated by Snaplet.
// Snaplet found fields that may contain personally identifiable information (PII)
// and used that to populate this file.
import { copycat as c } from "@snaplet/copycat";

import type { Transform } from "./structure";

function hasStringProp<T extends string>(x: unknown, key: T): x is { [key in T]: string } {
return !!x && typeof x === "object" && key in x;
}

function replaceKeyIfExists<T extends string>(x: object, key: T) {
if (hasStringProp(x, key)) {
return { ...x, [key]: c.uuid(x[key]) };
}
return x;
}

function generateSlug(x: string) {
return c.words(x, { max: 3 }).split(" ").join("-");
}

function replaceSensitiveKeys(record: object) {
return {
...record,
...replaceKeyIfExists(record, "client_id"),
...replaceKeyIfExists(record, "client_secret"),
...replaceKeyIfExists(record, "public_key"),
...replaceKeyIfExists(record, "api_key"),
...replaceKeyIfExists(record, "signing_secret"),
...replaceKeyIfExists(record, "access_token"),
...replaceKeyIfExists(record, "refresh_token"),
...replaceKeyIfExists(record, "stripe_user_id"),
...replaceKeyIfExists(record, "stripe_publishable_key"),
...replaceKeyIfExists(record, "accessToken"),
...replaceKeyIfExists(record, "refreshToken"),
...replaceKeyIfExists(record, "bot_user_id"),
...replaceKeyIfExists(record, "app_id"),
};
}

const generateUsername = (x: string) => `${c.firstName(x)}-${c.lastName(x)}${c.int(x, { min: 2, max: 99 })}`;

const config: Transform = () => ({
public: {
ApiKey: ({ row }) => ({
hashedKey: c.uuid(row.hashedKey),
}),
App: ({ row }) => ({
keys: replaceSensitiveKeys(row.keys),
}),
Attendee: ({ row }) => ({
email: c.email(row.email),
name: c.fullName(row.name),
timeZone: c.timezone(row.timeZone),
}),
Credential: ({ row }) => ({
key: typeof row.key === "string" ? c.uuid(row.key) : replaceSensitiveKeys(row.key),
}),
EventType: ({ row }) => ({
slug: generateSlug(row.slug),
timeZone: c.timezone(row.timeZone),
eventName: c.words(row.eventName, { max: 3 }),
}),
ResetPasswordRequest: ({ row }) => ({
email: c.email(row.email),
}),
Schedule: ({ row }) => ({
timeZone: c.timezone(row.timeZone),
}),
Team: ({ row }) => ({
bio: c.sentence(row.bio),
name: c.words(row.name, { max: 2 }),
slug: generateSlug(row.slug),
}),
users: ({ row }) =>
row.role !== "ADMIN"
? {
bio: c.sentence(row.bio),
email: c.email(row.email),
name: c.fullName(row.name),
password: c.password(row.password),
timeZone: c.timezone(row.timeZone),
username: generateUsername(row.username),
}
: row,
VerificationToken: ({ row }) => ({
token: c.uuid(row.token),
}),
Account: ({ row }) => ({
access_token: c.uuid(row.access_token),
refresh_token: c.uuid(row.refresh_token),
}),
},
});

export default config;
@ -7,8 +7,6 @@
"bradlc.vscode-tailwindcss", // hinting / autocompletion for tailwind
"ban.spellright", // Spell check for docs
"stripe.vscode-stripe", // stripe VSCode extension
"Prisma.prisma", // syntax|format|completion for prisma
"rebornix.project-snippets", // Share useful snippets between collaborators
"inlang.vs-code-extension" // improved i18n DX
"Prisma.prisma" // syntax|format|completion for prisma
]
}
@ -2,20 +2,38 @@
"version": "0.2.0",
"configurations": [
{
"type": "node",
"name": "Next.js: Server",
"type": "node-terminal",
"request": "launch",
"name": "Next.js Node Debug",
"runtimeExecutable": "${workspaceFolder}/node_modules/next/dist/bin/next",
"env": {
"NODE_OPTIONS": "--inspect"
},
"cwd": "${workspaceFolder}/apps/web",
"command": "npm run dev",
"skipFiles": ["<node_internals>/**"],
"outFiles": [
"${workspaceFolder}/**/*.js",
"!**/node_modules/**"
],
"sourceMaps": true,
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
]
},
{
"name": "Next.js: Client",
"type": "pwa-chrome",
"request": "launch",
"url": "http://localhost:3000"
},
{
"name": "Next.js: Full Stack",
"type": "node-terminal",
"request": "launch",
"command": "npm run dev",
"console": "integratedTerminal",
"sourceMapPathOverrides": {
"meteor://💻app/*": "${workspaceFolder}/*",
"webpack:///./~/*": "${workspaceFolder}/node_modules/*",
"webpack://?:*/*": "${workspaceFolder}/*"
"serverReadyAction": {
"pattern": "started server on .+, url: (https?://.+)",
"uriFormat": "%s",
"action": "debugWithChrome"
}
}
]
}
}
@ -6,6 +6,5 @@
},
"typescript.preferences.importModuleSpecifier": "non-relative",
"spellright.language": ["en"],
"spellright.documentTypes": ["markdown", "typescript", "typescriptreact"],
"tailwindCSS.experimental.classRegex": [["cva\\(([^)]*)\\)", "[\"'`]([^\"'`]*).*?[\"'`]"]]
"spellright.documentTypes": ["markdown"]
}
@ -1,74 +0,0 @@
{
"version": "2.0.0",
"presentation": {
"echo": false,
"reveal": "always",
"focus": false,
"panel": "dedicated",
"showReuseMessage": true
},
"tasks": [
{
"label": "Launch Cal.com Development terminals",
"dependsOn": [
"Web App(3000)",
"Website(3001)",
"Embed Core(3100)",
"Embed React(3101)",
"Prisma Studio(5555)",
"Maildev(587)"
],
// Mark as the default build task so cmd/ctrl+shift+b will create them
"group": {
"kind": "build",
"isDefault": true
}
},
{
// The name that shows up in terminal tab
"label": "Web App(3000)",
// The task will launch a shell
"type": "shell",
"command": "yarn dev",
// Set the shell type
// Mark as a background task to avoid the spinner animation on the terminal tab
"isBackground": false,
"problemMatcher": []
},
{
"label": "Website(3001)",
"type": "shell",
"command": "cd apps/website && yarn dev",
"isBackground": false,
"problemMatcher": []
},
{
"label": "Embed Core(3100)",
"type": "shell",
"command": "cd packages/embeds/embed-core && yarn dev",
"isBackground": false,
"problemMatcher": []
},
{
"label": "Embed React(3101)",
"type": "shell",
"command": "cd packages/embeds/embed-react && yarn dev",
"isBackground": false,
"problemMatcher": []
},
{
"label": "Prisma Studio(5555)",
"type": "shell",
"command": "yarn db-studio",
"isBackground": false,
"problemMatcher": []
},
{
"label": "Maildev(587)",
"type": "shell",
"command": "maildev -s 587",
"isBackground": false,
"problemMatcher": []
}
]
}
@ -1,15 +0,0 @@
diff --git a/index.cjs b/index.cjs
index b645707a3549fc298508726e404243499bbed499..f34b0891e99b275a9218e253f303f43d31ef3f73 100644
--- a/index.cjs
+++ b/index.cjs
@@ -13,8 +13,8 @@ function withMetadataArgument(func, _arguments) {
// https://github.com/babel/babel/issues/2212#issuecomment-131827986
// An alternative approach:
// https://www.npmjs.com/package/babel-plugin-add-module-exports
-exports = module.exports = min.parsePhoneNumberFromString
-exports['default'] = min.parsePhoneNumberFromString
+// exports = module.exports = min.parsePhoneNumberFromString
+// exports['default'] = min.parsePhoneNumberFromString

// `parsePhoneNumberFromString()` named export is now considered legacy:
// it has been promoted to a default export due to being too verbose.
@ -1,26 +0,0 @@
diff --git a/dist/commonjs/serverSideTranslations.js b/dist/commonjs/serverSideTranslations.js
index bcad3d02fbdfab8dacb1d85efd79e98623a0c257..fff668f598154a13c4030d1b4a90d5d9c18214ad 100644
--- a/dist/commonjs/serverSideTranslations.js
+++ b/dist/commonjs/serverSideTranslations.js
@@ -36,7 +36,6 @@ var _fs = _interopRequireDefault(require("fs"));
var _path = _interopRequireDefault(require("path"));
var _createConfig = require("./config/createConfig");
var _node = _interopRequireDefault(require("./createClient/node"));
-var _appWithTranslation = require("./appWithTranslation");
var _utils = require("./utils");
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { (0, _defineProperty2["default"])(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
@@ -110,12 +109,8 @@ var serverSideTranslations = /*#__PURE__*/function () {
lng: initialLocale
}));
localeExtension = config.localeExtension, localePath = config.localePath, fallbackLng = config.fallbackLng, reloadOnPrerender = config.reloadOnPrerender;
- if (!reloadOnPrerender) {
- _context.next = 18;
- break;
- }
_context.next = 18;
- return _appWithTranslation.globalI18n === null || _appWithTranslation.globalI18n === void 0 ? void 0 : _appWithTranslation.globalI18n.reloadResources();
+ return void 0;
case 18:
_createClient = (0, _node["default"])(_objectSpread(_objectSpread({}, config), {}, {
lng: initialLocale
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -1,9 +0,0 @@
nodeLinker: node-modules

plugins:
- path: .yarn/plugins/@yarnpkg/plugin-interactive-tools.cjs
spec: "@yarnpkg/plugin-interactive-tools"
- path: .yarn/plugins/@yarnpkg/plugin-workspace-tools.cjs
spec: "@yarnpkg/plugin-workspace-tools"

yarnPath: .yarn/releases/yarn-3.4.1.cjs
@ -17,23 +17,23 @@ diverse, inclusive, and healthy community.
Examples of behavior that contributes to a positive environment for our
community include:

- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes,
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
- Focusing on what is best not just for us as individuals, but for the
* Focusing on what is best not just for us as individuals, but for the
  overall community

Examples of unacceptable behavior include:

- The use of sexualized language or imagery, and sexual attention or
* The use of sexualized language or imagery, and sexual attention or
  advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
  address, without their explicit permission
- Other conduct which could reasonably be considered inappropriate in a
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Enforcement Responsibilities

@ -106,7 +106,7 @@ Violating these terms may lead to a permanent ban.
### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within

CONTRIBUTING.md
@ -1,62 +1,9 @@
# Contributing to Cal.com

Contributions are what makes the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**.
Contributions are what make the open source community such an amazing place to be learn, inspire, and create. Any contributions you make are **greatly appreciated**.

- Before jumping into a PR be sure to search [existing PRs](https://github.com/calcom/cal.com/pulls) or [issues](https://github.com/calcom/cal.com/issues) for an open or closed item that relates to your submission.

## Priorities

<table>
<tr>
<td>
Type of Issue
</td>
<td>
Priority
</td>
</tr>
<tr>
<td>
Minor improvements, non-core feature requests
</td>
<td>
<a href="https://github.com/calcom/cal.com/issues?q=is:issue+is:open+sort:updated-desc+label:%22Low+priority%22">
<img src="https://img.shields.io/badge/-Low%20Priority-green">
</a>
</td>
</tr>
<tr>
<td>
Confusing UX (... but working)
</td>
<td>
<a href="https://github.com/calcom/cal.com/issues?q=is:issue+is:open+sort:updated-desc+label:%22Medium+priority%22">
<img src="https://img.shields.io/badge/-Medium%20Priority-yellow">
</a>
</td>
</tr>
<tr>
<td>
Core Features (Booking page, availability, timezone calculation)
</td>
<td>
<a href="https://github.com/calcom/cal.com/issues?q=is:issue+is:open+sort:updated-desc+label:%22High+priority%22">
<img src="https://img.shields.io/badge/-High%20Priority-orange">
</a>
</td>
</tr>
<tr>
<td>
Core Bugs (Login, Booking page, Emails are not working)
</td>
<td>
<a href="https://github.com/calcom/cal.com/issues?q=is:issue+is:open+sort:updated-desc+label:Urgent">
<img src="https://img.shields.io/badge/-Urgent-red">
</a>
</td>
</tr>
</table>

## Developing

The development branch is `main`. This is the branch that all pull

@ -86,28 +33,7 @@ To develop locally:
yarn
```

5. Set up your `.env` file:

- Duplicate `.env.example` to `.env`.
- Use `openssl rand -base64 32` to generate a key and add it under `NEXTAUTH_SECRET` in the `.env` file.
- Use `openssl rand -base64 24` to generate a key and add it under `CALENDSO_ENCRYPTION_KEY` in the `.env` file.

6. Setup Node
If your Node version does not meet the project's requirements as instructed by the docs, "nvm" (Node Version Manager) allows using Node at the version required by the project:

```sh
nvm use
```

You first might need to install the specific version and then use it:

```sh
nvm install && nvm use
```

You can install nvm from [here](https://github.com/nvm-sh/nvm).

7. Start developing and watch for code changes:
5. Start developing and watch for code changes:

```sh
yarn dev

@ -135,16 +61,6 @@ This will run and test all flows in multiple Chromium windows to verify that no
yarn test-e2e
```

#### Resolving issues

##### E2E test browsers not installed

Run `npx playwright install` to download test browsers and resolve the error below when running `yarn test-e2e`:

```
Executable doesn't exist at /Users/alice/Library/Caches/ms-playwright/chromium-1048/chrome-mac/Chromium.app/Contents/MacOS/Chromium
```

## Linting

To check the formatting of your code:
@ -157,52 +73,7 @@ If you get errors, be sure to fix them before committing.

## Making a Pull Request

- Be sure to [check the "Allow edits from maintainers" option](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork) while creating your PR.
- If your PR refers to or fixes an issue, be sure to add `refs #XXX` or `fixes #XXX` to the PR description. Replacing `XXX` with the respective issue number. See more about [Linking a pull request to an issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue).
- Be sure to [check the "Allow edits from maintainers" option](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork) while creating you PR.
- If your PR refers to or fixes an issue, be sure to add `refs #XXX` or `fixes #XXX` to the PR description. Replacing `XXX` with the respective issue number. Se more about [Linking a pull request to an issue
](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue).
- Be sure to fill the PR Template accordingly.
- Review [App Contribution Guidelines](./packages/app-store/CONTRIBUTING.md) when building integrations

## Guidelines for committing yarn lockfile

Do not commit your `yarn.lock` unless you've made changes to the `package.json`. If you've already committed `yarn.lock` unintentionally, follow these steps to undo:

If your last commit has the `yarn.lock` file alongside other files and you only wish to uncommit the `yarn.lock`:
```bash
git checkout HEAD~1 yarn.lock
git commit -m "Revert yarn.lock changes"
```
If you've pushed the commit with the `yarn.lock`:
1. Correct the commit locally using the above method.
2. Carefully force push:

```bash
git push origin <your-branch-name> --force
```

If `yarn.lock` was committed a while ago and there have been several commits since, you can use the following steps to revert just the `yarn.lock` changes without impacting the subsequent changes:

1. **Checkout a Previous Version**:
- Find the commit hash before the `yarn.lock` was unintentionally committed. You can do this by viewing the Git log:
```bash
git log yarn.lock
```
- Once you have identified the commit hash, use it to checkout the previous version of `yarn.lock`:
```bash
git checkout <commit_hash> yarn.lock
```

2. **Commit the Reverted Version**:
- After checking out the previous version of the `yarn.lock`, commit this change:
```bash
git commit -m "Revert yarn.lock to its state before unintended changes"
```

3. **Proceed with Caution**:
- If you need to push this change, first pull the latest changes from your remote branch to ensure you're not overwriting other recent changes:
```bash
git pull origin <your-branch-name>
```
- Then push the updated branch:
```bash
git push origin <your-branch-name>
```

LICENSE
@ -2,7 +2,7 @@ Copyright (c) 2020-present Cal.com, Inc.

Portions of this software are licensed as follows:

* All content that resides under https://github.com/calcom/cal.com/tree/main/packages/features/ee and https://github.com/calcom/cal.com/tree/main/apps/api/ directory of this repository (Commercial License) is licensed under the license defined in "ee/LICENSE".
* All content that resides under "apps/web/ee/" (https://github.com/calcom/cal.com/tree/main/apps/web/ee) directory of this repository (Enterprise Edition) is licensed under the license defined in "ee/LICENSE".
* All third party components incorporated into the Cal.com Software are licensed under the original license provided by the owner of the applicable component.
* Content outside of the above mentioned directories or restrictions above is available under the "AGPLv3" license as defined below.

SECURITY.md
@ -1,57 +0,0 @@
# Security

Contact: [security@cal.com](mailto:security@cal.com)

Based on [https://supabase.com/.well-known/security.txt](https://supabase.com/.well-known/security.txt)

At Cal.com, we consider the security of our systems a top priority. But no
matter how much effort we put into system security, there can still be
vulnerabilities present.

If you discover a vulnerability, we would like to know about it so we can take
steps to address it as quickly as possible. We would like to ask you to help us
better protect our clients and our systems.

## Out of scope vulnerabilities

- Clickjacking on pages with no sensitive actions.
- Unauthenticated/logout/login CSRF.
- Attacks requiring MITM or physical access to a user's device.
- Any activity that could lead to the disruption of our service (DoS).
- Content spoofing and text injection issues without showing an attack
  vector/without being able to modify HTML/CSS.
- Email spoofing
- Missing DNSSEC, CAA, CSP headers
- Lack of Secure or HTTP only flag on non-sensitive cookies
- Deadlinks

## Please do the following

- E-mail your findings to [security@cal.com](mailto:security@cal.com).
- Do not run automated scanners on our infrastructure or dashboard. If you wish
  to do this, contact us and we will set up a sandbox for you.
- Do not take advantage of the vulnerability or problem you have discovered,
  for example by downloading more data than necessary to demonstrate the
  vulnerability or deleting or modifying other people's data,
- Do not reveal the problem to others until it has been resolved,
- Do not use attacks on physical security, social engineering, distributed
  denial of service, spam or applications of third parties,
- Do provide sufficient information to reproduce the problem, so we will be
  able to resolve it as quickly as possible. Usually, the IP address or the URL
  of the affected system and a description of the vulnerability will be
  sufficient, but complex vulnerabilities may require further explanation.

## What we promise

- We will respond to your report within 3 business days with our evaluation of
  the report and an expected resolution date,
- If you have followed the instructions above, we will not take any legal
  action against you in regard to the report,
- We will handle your report with strict confidentiality, and not pass on your
  personal details to third parties without your permission,
- We will keep you informed of the progress towards resolving the problem,
- In the public information concerning the problem reported, we will give your
  name as the discoverer of the problem (unless you desire otherwise), and
- We strive to resolve all problems as quickly as possible, and we would like
  to play an active role in the ultimate publication on the problem after it
  is resolved.
@ -1,4 +0,0 @@
# Checkly Tests

Run as `yarn checkly test`
Deploy the tests as `yarn checkly deploy`
@ -1,34 +0,0 @@
/**
 * This is a Playwright script Checkly generated for you based on your Vercel project.
 * To learn more about Browser checks and Playwright visit: https://www.checklyhq.com/docs/browser-checks
 */

// Create a Chromium browser
const { chromium } = require("playwright");

// Checkly supports top level await, but we wrap your code in an async function so you can run it locally too.
async function run() {
const browser = await chromium.launch();
const page = await browser.newPage();

// If available, we set the target URL to a preview deployment URL provided by the ENVIRONMENT_URL created by Vercel.
// Otherwise, we use the Production URL.
const targetUrl = process.env.ENVIRONMENT_URL || "https://app.cal.com";

// We visit the page. This waits for the "load" event by default.
const response = await page.goto(targetUrl);

// If the page doesn't return a successful response code, we fail the check
if (response.status() > 399) {
throw new Error(`Failed with response code ${response.status()}`);
}

// We snap a screenshot.
await page.screenshot({ path: "screenshot.jpg" });

// We close the page and browser. This is needed for collecting accurate web vitals.
await page.close();
await browser.close();
}

run();
@ -1,35 +0,0 @@
/**
 * This is a Playwright script Checkly generated for you based on your Vercel project.
 * To learn more about Browser checks and Playwright visit: https://www.checklyhq.com/docs/browser-checks
 */

// Create a Chromium browser
const { chromium } = require("playwright");

// Checkly supports top level await, but we wrap your code in an async function so you can run it locally too.
async function run() {
const browser = await chromium.launch();
const page = await browser.newPage();
const targetUrl = process.env.ENVIRONMENT_URL || "https://cal.com";
await page.goto(`${targetUrl}/rick/test-location-link`);

await page.waitForSelector('[data-testid="day"][data-disabled="false"]');
await page.click('[data-testid="day"][data-disabled="false"]');

await page.waitForSelector('[data-testid="time"]');
await page.click('[data-testid="time"]');

await page.waitForSelector("#name");
await page.click("#name");

await page.type("#name", "Calcom");

await page.type('[name="email"]', "cal@cal.com");

await page.waitForSelector('[data-testid="confirm-book-button"]');
await page.click('[data-testid="confirm-book-button"]');

await browser.close();
}

run();
@ -1,53 +0,0 @@
import type { Page } from "@playwright/test";
import { test, expect } from "@playwright/test";

test.describe("Org", () => {
// Because these pages involve next.config.js rewrites, it's better to test them on production
test.describe("Embeds - i.cal.com", () => {
test("Org Profile Page should be embeddable", async ({ page }) => {
const response = await page.goto("https://i.cal.com/embed");
expect(response?.status()).toBe(200);
await page.screenshot({ path: "screenshot.jpg" });
await expectPageToBeServerSideRendered(page);
});

test("Org User(Peer) Page should be embeddable", async ({ page }) => {
const response = await page.goto("https://i.cal.com/peer/embed");
expect(response?.status()).toBe(200);
await expect(page.locator("text=Peer Richelsen")).toBeVisible();
await expectPageToBeServerSideRendered(page);
});

test("Org User Event(peer/meet) Page should be embeddable", async ({ page }) => {
const response = await page.goto("https://i.cal.com/peer/meet/embed");
expect(response?.status()).toBe(200);
await expect(page.locator('[data-testid="decrementMonth"]')).toBeVisible();
await expect(page.locator('[data-testid="incrementMonth"]')).toBeVisible();
await expectPageToBeServerSideRendered(page);
});

test("Org Team Profile(/sales) page should be embeddable", async ({ page }) => {
const response = await page.goto("https://i.cal.com/sales/embed");
expect(response?.status()).toBe(200);
await expect(page.locator("text=Cal.com Sales")).toBeVisible();
await expectPageToBeServerSideRendered(page);
});

test("Org Team Event page(/sales/hippa) should be embeddable", async ({ page }) => {
const response = await page.goto("https://i.cal.com/sales/hipaa/embed");
expect(response?.status()).toBe(200);
await expect(page.locator('[data-testid="decrementMonth"]')).toBeVisible();
await expect(page.locator('[data-testid="incrementMonth"]')).toBeVisible();
await expectPageToBeServerSideRendered(page);
});
});
});

// This ensures that the route is actually mapped to a page that is using withEmbedSsr
async function expectPageToBeServerSideRendered(page: Page) {
expect(
await page.evaluate(() => {
return window.__NEXT_DATA__.props.pageProps.isEmbed;
})
).toBe(true);
}

app.json
@ -6,84 +6,19 @@
"keywords": ["react", "typescript", "node", "nextjs", "prisma", "postgres", "trpc"],
"addons": [
{
"plan": "heroku-postgresql:mini"
"plan": "heroku-postgresql:hobby-dev"
}
],
"env": {
"PGSSLMODE": {
"description": "If you use Heroku to deploy Postgres (or use self-signed certs for Postgres) then put 'no-verify' here.",
"value": "no-verify"
},
"NEXT_PUBLIC_WEBAPP_URL": {
"description": "Replace HEROKU_APP_NAME with the name given to your app",
"value": "https://HEROKU_APP_NAME.herokuapp.com",
"required": "true"
},
"CALENDSO_ENCRYPTION_KEY": {
"description": "Application Key for symmetric encryption and decryption. Must be 32 bytes for AES256 encryption algorithm.",
"value": "secret",
"required": "true"
},
"NEXTAUTH_URL": {
"BASE_URL": {
"description": "Replace HEROKU_APP_NAME with the name given to your app",
"value": "https://HEROKU_APP_NAME.herokuapp.com"
},
"NEXTAUTH_SECRET": {
"description": "Cookie encryption key",
"generator": "secret",
"required": "true"
"CALENDSO_ENCRYPTION_KEY": {
"description": "Application Key for symmetric encryption and decryption. Must be 32 bytes for AES256 encryption algorithm.",
"value": "secret"
},
"CRON_API_KEY": {
"description": "ApiKey for cronjobs",
"value": ""
},
"CRON_ENABLE_APP_SYNC": {
"description": "Whether to automatically keep app metadata in the database in sync with the metadata/config files. When disabled, the sync runs in a reporting-only dry-run mode.",
"value": "false"
},
"SEND_FEEDBACK_EMAIL": {
"description": "Send feedback email",
"value": ""
},
"SENDGRID_API_KEY": {
"description": "Sendgrid api key. Used for email reminders in workflows",
"value": "",
"required": false
},
"SENDGRID_SYNC_API_KEY": {
"description": "Sendgrid internal sync service",
"value": "",
"required": false
},
"SENDGRID_EMAIL": {
"description": "Sendgrid email. Used for email reminders in workflows",
"value": "",
"required": false
},
"EMAIL_FROM": {
"description": "Configures the global From: header whilst sending emails",
"value": ""
},
"EMAIL_SERVER_HOST": {
"description": "Configures the global SMTP server host",
"value": "smtp.gmail.com"
},
"EMAIL_SERVER_PORT": {
"description": "Configures the global SMTP server port",
"value": "465"
},
"EMAIL_SERVER_USER": {
"description": "Configures the global SMTP server user",
"value": "<gmail_emailAddress>"
},
"EMAIL_SERVER_PASSWORD": {
"description": "Configures the global SMTP server password",
"value": "<gmail_app_password>"
},
"NEXT_PUBLIC_TEAM_IMPERSONATION": {
"description": "Set the following value to true if you wish to enable Team Impersonation",
"value": "false"
}
"NEXTAUTH_SECRET": "secret"
},
"scripts": {
"postdeploy": "cd packages/prisma && npx prisma migrate deploy"
@ -0,0 +1 @@
|
|||
Subproject commit 943cd10de1f6661273d2ec18acdaa93118852714
|
|
@ -1,21 +0,0 @@
|
|||
BACKEND_URL=http://localhost:3002/api
|
||||
# BACKEND_URL=https://api.cal.com/v1
|
||||
FRONTEND_URL=http://localhost:3000
|
||||
# FRONTEND_URL=https://cal.com
|
||||
|
||||
APP_ID=cal-ai
|
||||
APP_URL=http://localhost:3000/apps/cal-ai
|
||||
|
||||
# This is for the onboard route. Which domain should we send emails from?
|
||||
SENDER_DOMAIN=cal.ai
|
||||
|
||||
# Used to verify requests from SendGrid. You can generate a new one with: `openssl rand -hex 32`
|
||||
PARSE_KEY=
|
||||
|
||||
OPENAI_API_KEY=
|
||||
|
||||
# Optionally trace completions at https://smith.langchain.com
|
||||
# LANGCHAIN_TRACING_V2=true
|
||||
# LANGCHAIN_ENDPOINT=
|
||||
# LANGCHAIN_API_KEY=
|
||||
# LANGCHAIN_PROJECT=
|
|
@ -1,64 +0,0 @@
|
|||
# Cal.ai
|
||||
|
||||
Welcome to [Cal.ai](https://cal.ai)!
|
||||
|
||||
This app lets you chat with your calendar via email:
|
||||
|
||||
- Turn informal emails into bookings, e.g. forward "wanna meet tmrw at 2pm?"
|
||||
- List and rearrange your bookings, e.g. "clear my afternoon"
|
||||
- Answer basic questions about your busiest times, e.g. "how does my Tuesday look?"
|
||||
|
||||
The core logic is contained in [agent/route.ts](/apps/ai/src/app/api/agent/route.ts). Here, a [LangChain Agent Executor](https://docs.langchain.com/docs/components/agents/agent-executor) is tasked with following your instructions. Given your last-known timezone, working hours, and busy times, it attempts to CRUD your bookings.
|
||||
|
||||
_The AI agent can only choose from a set of tools, without ever seeing your API key._
|
||||
|
||||
Emails are cleaned and routed in [receive/route.ts](/apps/ai/src/app/api/receive/route.ts) using [MailParser](https://nodemailer.com/extras/mailparser/).
|
||||
|
||||
Incoming emails are routed by email address. Addresses are verified by [DKIM record](https://support.google.com/a/answer/174124?hl=en), making them hard to spoof.
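
For orientation, here is a condensed sketch of those checks as they appear in `receive/route.ts` later in this diff; the `email`, `dkim`, and `envelope` field names come from SendGrid's Inbound Parse payload:

```ts
import { simpleParser } from "mailparser";
import type { Source } from "mailparser";

// Condensed from receive/route.ts: parse the Inbound Parse form body and
// check SendGrid's DKIM verdict before trusting the sender address.
async function parseInbound(formData: FormData) {
  const body = Object.fromEntries(formData);

  // body.dkim looks like "{@domain-com.22222222.gappssmtp.com : pass}"
  const dkimPassed = (body.dkim as string).includes(" : pass");

  // The envelope carries the SMTP from/to addresses as a JSON string.
  const envelope = JSON.parse(body.envelope as string) as { from: string; to: string[] };

  // The raw MIME message is parsed into subject/text/html.
  const parsed = await simpleParser(body.email as Source);

  return { dkimPassed, from: envelope.from, subject: parsed.subject, text: parsed.text };
}
```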
|
||||
|
||||
## Recognition
|
||||
|
||||
<a href="https://www.producthunt.com/posts/cal-ai?utm_source=badge-top-post-badge&utm_medium=badge&utm_souce=badge-cal-ai" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/top-post-badge.svg?post_id=419860&theme=light&period=daily" alt="Cal.ai - World's first open source AI scheduling assistant | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a> <a href="https://www.producthunt.com/posts/cal-ai?utm_source=badge-featured&utm_medium=badge&utm_souce=badge-cal-ai" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=419860&theme=light" alt="Cal.ai - World's first open source AI scheduling assistant | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
|
||||
|
||||
## Getting Started
|
||||
|
||||
### Development
|
||||
|
||||
If you haven't yet, please run the [root setup](/README.md) steps.
|
||||
|
||||
Before running the app, please see [env.mjs](./src/env.mjs) for all required environment variables. Run `cp .env.example .env` in this folder to get started. You'll need:
|
||||
|
||||
- An [OpenAI API key](https://platform.openai.com/account/api-keys) with access to GPT-4
|
||||
- A [SendGrid API key](https://app.sendgrid.com/settings/api_keys)
|
||||
- A default sender email (for example, `me@dev.example.com`)
|
||||
- The Cal.ai app's ID and URL (see [add.ts](/packages/app-store/cal-ai/api/index.ts))
|
||||
- A unique value for `PARSE_KEY`, generated with `openssl rand -hex 32`
|
||||
|
||||
To stand up the API and AI apps simultaneously, simply run `yarn dev:ai`.
|
||||
|
||||
### Agent Architecture
|
||||
|
||||
The scheduling agent in [agent/route.ts](/apps/ai/src/app/api/agent/route.ts) calls an LLM (in this case, GPT-4) in a loop to accomplish a multi-step task. We use an [OpenAI Functions agent](https://js.langchain.com/docs/modules/agents/agent_types/openai_functions_agent), which is fine-tuned to output text suited for passing to tools.
|
||||
|
||||
Tools (e.g. [`createBooking`](/apps/ai/src/tools/createBooking.ts)) are simply JavaScript methods wrapped by Zod schemas, telling the agent what format to output.
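
The executor wiring in `agent/route.ts` is not reproduced in this README, but given the tools in this folder, a minimal version might look roughly like the following (import paths, the exact `gpt-4` model name, and the absence of a system prompt are assumptions; the real route likely injects the user's timezone, working hours, and event types into the prompt):

```ts
import { initializeAgentExecutorWithOptions } from "langchain/agents";
import { ChatOpenAI } from "langchain/chat_models/openai";

import createBookingTool from "../tools/createBooking";
import getAvailabilityTool from "../tools/getAvailability";
import getEventTypesTool from "../tools/getEventTypes";
import type { UserList } from "../types/user";

// Sketch only: run an OpenAI Functions agent over the Cal.ai tools until it
// produces a final answer for the incoming email.
export async function runAgentSketch(input: string, apiKey: string, userId: number, users: UserList) {
  const model = new ChatOpenAI({ modelName: "gpt-4", temperature: 0 });

  const tools = [
    createBookingTool(apiKey, userId, users),
    getAvailabilityTool(apiKey),
    getEventTypesTool(apiKey),
  ];

  const executor = await initializeAgentExecutorWithOptions(tools, model, {
    agentType: "openai-functions",
  });

  const { output } = await executor.call({ input });
  return output as string;
}
```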
|
||||
|
||||
Here is the full architecture:
|
||||
|
||||
![Cal.ai architecture](/apps/ai/src/public/architecture.png)
|
||||
|
||||
### Email Router
|
||||
|
||||
To expose the AI app, run `ngrok http 3005` (or the AI app's port number) in a new terminal. You may need to install [ngrok](https://ngrok.com/).
|
||||
|
||||
To forward incoming emails to the serverless function at `/agent`, we use [SendGrid's Inbound Parse](https://docs.sendgrid.com/for-developers/parsing-email/setting-up-the-inbound-parse-webhook).
|
||||
|
||||
1. Ensure you have a [SendGrid account](https://signup.sendgrid.com/)
|
||||
2. Ensure you have an authenticated domain. Go to Settings > Sender Authentication > Authenticate. For DNS host, select `I'm not sure`. Click Next and add your domain, e.g. `example.com`. Choose Manual Setup. You'll be given three CNAME records to add to your DNS settings, e.g. in [Vercel Domains](https://vercel.com/dashboard/domains). After adding those records, click Verify. To troubleshoot, see the [full instructions](https://docs.sendgrid.com/ui/account-and-settings/how-to-set-up-domain-authentication).
|
||||
3. Authorize your domain for email with MX records: one with name `[your domain].com` and value `mx.sendgrid.net.`, and another with name `bounces.[your domain].com` and value `feedback-smtp.us-east-1.amazonses.com`, both with priority `10` if prompted.
|
||||
4. Go to Settings > [Inbound Parse](https://app.sendgrid.com/settings/parse) > Add Host & URL. Choose your authenticated domain.
|
||||
5. In the Destination URL field, use the ngrok URL from above along with the path, `/api/receive`, and one param, `parseKey`, which lives in [this app's .env](/apps/ai/.env.example) under `PARSE_KEY`. The full URL should look like `https://abc.ngrok.io/api/receive?parseKey=ABC-123`. (A minimal sketch of how this key is checked appears after this list.)
|
||||
6. Activate "POST the raw, full MIME message".
|
||||
7. Send an email to `[anyUsername]@example.com`. You should see a ping on the ngrok listener and server.
|
||||
8. Adjust the logic in [receive/route.ts](/apps/ai/src/app/api/receive/route.ts), save to hot-reload, and send another email to test the behaviour.
|
||||
|
||||
Please feel free to improve any part of this architecture!
|
|
@ -1,24 +0,0 @@
|
|||
const withBundleAnalyzer = require("@next/bundle-analyzer");
|
||||
|
||||
const plugins = [];
|
||||
plugins.push(withBundleAnalyzer({ enabled: process.env.ANALYZE === "true" }));
|
||||
|
||||
/** @type {import("next").NextConfig} */
|
||||
const nextConfig = {
|
||||
async redirects() {
|
||||
return [
|
||||
{
|
||||
source: "/",
|
||||
destination: "https://cal.com/ai",
|
||||
permanent: true,
|
||||
},
|
||||
];
|
||||
},
|
||||
i18n: {
|
||||
defaultLocale: "en",
|
||||
locales: ["en"],
|
||||
},
|
||||
reactStrictMode: true,
|
||||
};
|
||||
|
||||
module.exports = () => plugins.reduce((acc, next) => next(acc), nextConfig);
|
|
@ -1,26 +0,0 @@
|
|||
{
|
||||
"name": "@calcom/ai",
|
||||
"version": "1.2.1",
|
||||
"private": true,
|
||||
"author": "Cal.com Inc.",
|
||||
"dependencies": {
|
||||
"@calcom/prisma": "*",
|
||||
"@t3-oss/env-nextjs": "^0.6.1",
|
||||
"langchain": "^0.0.131",
|
||||
"mailparser": "^3.6.5",
|
||||
"next": "^13.5.4",
|
||||
"supports-color": "8.1.1",
|
||||
"zod": "^3.22.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/mailparser": "^3.4.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "next build",
|
||||
"dev": "next dev -p 3005",
|
||||
"format": "npx prettier . --write",
|
||||
"lint": "eslint . --ext .ts,.js,.tsx,.jsx",
|
||||
"lint:fix": "eslint . --ext .ts,.js,.tsx,.jsx --fix",
|
||||
"start": "next start"
|
||||
}
|
||||
}
|
|
@ -1,55 +0,0 @@
|
|||
import type { NextRequest } from "next/server";
|
||||
import { NextResponse } from "next/server";
|
||||
|
||||
import agent from "../../../utils/agent";
|
||||
import sendEmail from "../../../utils/sendEmail";
|
||||
import { verifyParseKey } from "../../../utils/verifyParseKey";
|
||||
|
||||
// Allow agent loop to run for up to 5 minutes
|
||||
export const maxDuration = 300;
|
||||
|
||||
/**
|
||||
* Launches a LangChain agent to process an incoming email,
|
||||
* then sends the response to the user.
|
||||
*/
|
||||
export const POST = async (request: NextRequest) => {
|
||||
const verified = verifyParseKey(request.url);
|
||||
|
||||
if (!verified) {
|
||||
return new NextResponse("Unauthorized", { status: 401 });
|
||||
}
|
||||
|
||||
const json = await request.json();
|
||||
|
||||
const { apiKey, userId, message, subject, user, users, replyTo: agentEmail } = json;
|
||||
|
||||
if ((!message && !subject) || !user) {
|
||||
return new NextResponse("Missing fields", { status: 400 });
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await agent(`${subject}\n\n${message}`, { ...user }, users, apiKey, userId, agentEmail);
|
||||
|
||||
// Send response to user
|
||||
await sendEmail({
|
||||
subject: `Re: ${subject}`,
|
||||
text: response.replace(/(?:\r\n|\r|\n)/g, "\n"),
|
||||
to: user.email,
|
||||
from: agentEmail,
|
||||
});
|
||||
|
||||
return new NextResponse("ok");
|
||||
} catch (error) {
|
||||
await sendEmail({
|
||||
subject: `Re: ${subject}`,
|
||||
text: "Thanks for using Cal.ai! We're experiencing high demand and can't currently process your request. Please try again later.",
|
||||
to: user.email,
|
||||
from: agentEmail,
|
||||
});
|
||||
|
||||
return new NextResponse(
|
||||
(error as Error).message || "Something went wrong. Please try again or reach out for help.",
|
||||
{ status: 500 }
|
||||
);
|
||||
}
|
||||
};
|
|
@ -1,44 +0,0 @@
|
|||
import type { NextRequest } from "next/server";
|
||||
|
||||
import prisma from "@calcom/prisma";
|
||||
|
||||
import { env } from "../../../env.mjs";
|
||||
import sendEmail from "../../../utils/sendEmail";
|
||||
|
||||
export const POST = async (request: NextRequest) => {
|
||||
const { userId } = await request.json();
|
||||
|
||||
const user = await prisma.user.findUnique({
|
||||
select: {
|
||||
email: true,
|
||||
name: true,
|
||||
username: true,
|
||||
},
|
||||
where: {
|
||||
id: userId,
|
||||
},
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return new Response("User not found", { status: 404 });
|
||||
}
|
||||
|
||||
await sendEmail({
|
||||
subject: "Welcome to Cal AI",
|
||||
to: user.email,
|
||||
from: `${user.username}@${env.SENDER_DOMAIN}`,
|
||||
text: `Hi ${
|
||||
user.name || `@${user.username}`
|
||||
},\n\nI'm Cal AI, your personal booking assistant! I'll be here, 24/7 to help manage your busy schedule and find times to meet with the people you care about.\n\nHere are some things you can ask me:\n\n- "Book a meeting with @someone" (The @ symbol lets you tag Cal.com users)\n- "What meetings do I have today?" (I'll show you your schedule)\n- "Find a time for coffee with someone@gmail.com" (I'll intro and send them some good times)\n\nI'm still learning, so if you have any feedback, please tweet it to @calcom!\n\nRemember, you can always reach me here, at ${
|
||||
user.username
|
||||
}@${
|
||||
env.SENDER_DOMAIN
|
||||
}.\n\nLooking forward to working together (:\n\n- Cal AI, Your personal booking assistant`,
|
||||
html: `Hi ${
|
||||
user.name || `@${user.username}`
|
||||
},<br><br>I'm Cal AI, your personal booking assistant! I'll be here, 24/7 to help manage your busy schedule and find times to meet with the people you care about.<br><br>Here are some things you can ask me:<br><br>- "Book a meeting with @someone" (The @ symbol lets you tag Cal.com users)<br>- "What meetings do I have today?" (I'll show you your schedule)<br>- "Find a time for coffee with someone@gmail.com" (I'll intro and send them some good times)<br><br>I'm still learning, so if you have any feedback, please send it to <a href="https://twitter.com/calcom">@calcom</a> on X!<br><br>Remember, you can always reach me here, at ${
|
||||
user.username
|
||||
}@${env.SENDER_DOMAIN}.<br><br>Looking forward to working together (:<br><br>- Cal AI`,
|
||||
});
|
||||
return new Response("OK", { status: 200 });
|
||||
};
|
|
@ -1,186 +0,0 @@
|
|||
import type { ParsedMail, Source } from "mailparser";
|
||||
import { simpleParser } from "mailparser";
|
||||
import type { NextRequest } from "next/server";
|
||||
import { NextResponse } from "next/server";
|
||||
|
||||
import { checkRateLimitAndThrowError } from "@calcom/lib/checkRateLimitAndThrowError";
|
||||
import prisma from "@calcom/prisma";
|
||||
|
||||
import { env } from "../../../env.mjs";
|
||||
import { fetchAvailability } from "../../../tools/getAvailability";
|
||||
import { fetchEventTypes } from "../../../tools/getEventTypes";
|
||||
import { extractUsers } from "../../../utils/extractUsers";
|
||||
import getHostFromHeaders from "../../../utils/host";
|
||||
import now from "../../../utils/now";
|
||||
import sendEmail from "../../../utils/sendEmail";
|
||||
import { verifyParseKey } from "../../../utils/verifyParseKey";
|
||||
|
||||
// Allow receive loop to run for up to 30 seconds
|
||||
// Why so long? The rate-determining API calls (getAvailability, getEventTypes) can take up to 15 seconds at peak times, so we give them a little extra time to complete.
|
||||
export const maxDuration = 30;
|
||||
|
||||
/**
|
||||
* Verifies email signature and app authorization,
|
||||
* then hands off to booking agent.
|
||||
*/
|
||||
export const POST = async (request: NextRequest) => {
|
||||
const verified = verifyParseKey(request.url);
|
||||
|
||||
if (!verified) {
|
||||
return new NextResponse("Unauthorized", { status: 401 });
|
||||
}
|
||||
|
||||
const formData = await request.formData();
|
||||
const body = Object.fromEntries(formData);
|
||||
const envelope = JSON.parse(body.envelope as string);
|
||||
|
||||
const aiEmail = envelope.to[0];
|
||||
const subject = body.subject || "";
|
||||
|
||||
try {
|
||||
await checkRateLimitAndThrowError({
|
||||
identifier: `ai:email:${envelope.from}`,
|
||||
rateLimitingType: "ai",
|
||||
});
|
||||
} catch (error) {
|
||||
await sendEmail({
|
||||
subject: `Re: ${subject}`,
|
||||
text: "Thanks for using Cal.ai! You've reached your daily limit. Please try again tomorrow.",
|
||||
to: envelope.from,
|
||||
from: aiEmail,
|
||||
});
|
||||
|
||||
return new NextResponse("Exceeded rate limit", { status: 200 }); // Don't return 429 to avoid triggering retry logic in SendGrid
|
||||
}
|
||||
|
||||
// Parse email from mixed MIME type
|
||||
const parsed: ParsedMail = await simpleParser(body.email as Source);
|
||||
|
||||
if (!parsed.text && !parsed.subject) {
|
||||
await sendEmail({
|
||||
subject: `Re: ${subject}`,
|
||||
text: "Thanks for using Cal.ai! It looks like you forgot to include a message. Please try again.",
|
||||
to: envelope.from,
|
||||
from: aiEmail,
|
||||
});
|
||||
return new NextResponse("Email missing text and subject", { status: 400 });
|
||||
}
|
||||
|
||||
const user = await prisma.user.findUnique({
|
||||
select: {
|
||||
email: true,
|
||||
id: true,
|
||||
username: true,
|
||||
timeZone: true,
|
||||
credentials: {
|
||||
select: {
|
||||
appId: true,
|
||||
key: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
where: { email: envelope.from },
|
||||
});
|
||||
|
||||
// body.dkim looks like {@domain-com.22222222.gappssmtp.com : pass}
|
||||
const signature = (body.dkim as string).includes(" : pass");
|
||||
|
||||
// User is not a cal.com user or is using an unverified email.
|
||||
if (!signature || !user) {
|
||||
await sendEmail({
|
||||
html: `Thanks for your interest in Cal.ai! To get started, make sure you have a <a href="https://cal.com/signup" target="_blank">cal.com</a> account with this email address and then install Cal.ai here: <a href="https://go.cal.com/ai" target="_blank">go.cal.com/ai</a>.`,
|
||||
subject: `Re: ${subject}`,
|
||||
text: `Thanks for your interest in Cal.ai! To get started, make sure you have a cal.com account with this email address. You can sign up for an account at: https://cal.com/signup`,
|
||||
to: envelope.from,
|
||||
from: aiEmail,
|
||||
});
|
||||
|
||||
return new NextResponse("ok");
|
||||
}
|
||||
|
||||
const credential = user.credentials.find((c) => c.appId === env.APP_ID)?.key;
|
||||
|
||||
// User has not installed the app from the app store. Direct them to install it.
|
||||
if (!(credential as { apiKey: string })?.apiKey) {
|
||||
const url = env.APP_URL;
|
||||
|
||||
await sendEmail({
|
||||
html: `Thanks for using Cal.ai! To get started, the app must be installed. <a href=${url} target="_blank">Click this link</a> to install it.`,
|
||||
subject: `Re: ${subject}`,
|
||||
text: `Thanks for using Cal.ai! To get started, the app must be installed. Click this link to install the Cal.ai app: ${url}`,
|
||||
to: envelope.from,
|
||||
from: aiEmail,
|
||||
});
|
||||
|
||||
return new NextResponse("ok");
|
||||
}
|
||||
|
||||
const { apiKey } = credential as { apiKey: string };
|
||||
|
||||
// Pre-fetch data relevant to most bookings.
|
||||
const [eventTypes, availability, users] = await Promise.all([
|
||||
fetchEventTypes({
|
||||
apiKey,
|
||||
}),
|
||||
fetchAvailability({
|
||||
apiKey,
|
||||
userId: user.id,
|
||||
dateFrom: now(user.timeZone),
|
||||
dateTo: now(user.timeZone),
|
||||
}),
|
||||
extractUsers(`${parsed.text} ${parsed.subject}`),
|
||||
]);
|
||||
|
||||
if ("error" in availability) {
|
||||
await sendEmail({
|
||||
subject: `Re: ${subject}`,
|
||||
text: "Sorry, there was an error fetching your availability. Please try again.",
|
||||
to: user.email,
|
||||
from: aiEmail,
|
||||
});
|
||||
console.error(availability.error);
|
||||
return new NextResponse("Error fetching availability. Please try again.", { status: 400 });
|
||||
}
|
||||
|
||||
if ("error" in eventTypes) {
|
||||
await sendEmail({
|
||||
subject: `Re: ${subject}`,
|
||||
text: "Sorry, there was an error fetching your event types. Please try again.",
|
||||
to: user.email,
|
||||
from: aiEmail,
|
||||
});
|
||||
console.error(eventTypes.error);
|
||||
return new NextResponse("Error fetching event types. Please try again.", { status: 400 });
|
||||
}
|
||||
|
||||
const { workingHours } = availability;
|
||||
|
||||
const appHost = getHostFromHeaders(request.headers);
|
||||
|
||||
// Hand off to long-running agent endpoint to handle the email. (don't await)
|
||||
fetch(`${appHost}/api/agent?parseKey=${env.PARSE_KEY}`, {
|
||||
body: JSON.stringify({
|
||||
apiKey,
|
||||
userId: user.id,
|
||||
message: parsed.text || "",
|
||||
subject: parsed.subject || "",
|
||||
replyTo: aiEmail,
|
||||
user: {
|
||||
email: user.email,
|
||||
eventTypes,
|
||||
username: user.username,
|
||||
timeZone: user.timeZone,
|
||||
workingHours,
|
||||
},
|
||||
users,
|
||||
}),
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
method: "POST",
|
||||
});
|
||||
|
||||
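// Give the fire-and-forget request above a moment to be dispatched before this handler returns (assumption: this avoids the serverless runtime dropping the pending fetch).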
await new Promise((r) => setTimeout(r, 1000));
|
||||
|
||||
return new NextResponse("ok");
|
||||
};
|
|
@ -1,47 +0,0 @@
|
|||
import { createEnv } from "@t3-oss/env-nextjs";
|
||||
import { z } from "zod";
|
||||
|
||||
export const env = createEnv({
|
||||
/**
|
||||
* Specify your client-side environment variables schema here. This way you can ensure the app
|
||||
* isn't built with invalid env vars. To expose them to the client, prefix them with
|
||||
* `NEXT_PUBLIC_`.
|
||||
*/
|
||||
client: {
|
||||
// NEXT_PUBLIC_CLIENTVAR: z.string().min(1),
|
||||
},
|
||||
|
||||
/**
|
||||
* You can't destructure `process.env` as a regular object in the Next.js edge runtimes (e.g.
|
||||
* middlewares) or client-side, so we need to destructure manually.
|
||||
*/
|
||||
runtimeEnv: {
|
||||
BACKEND_URL: process.env.BACKEND_URL,
|
||||
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||
APP_ID: process.env.APP_ID,
|
||||
APP_URL: process.env.APP_URL,
|
||||
SENDER_DOMAIN: process.env.SENDER_DOMAIN,
|
||||
PARSE_KEY: process.env.PARSE_KEY,
|
||||
NODE_ENV: process.env.NODE_ENV,
|
||||
OPENAI_API_KEY: process.env.OPENAI_API_KEY,
|
||||
SENDGRID_API_KEY: process.env.SENDGRID_API_KEY,
|
||||
DATABASE_URL: process.env.DATABASE_URL,
|
||||
},
|
||||
|
||||
/**
|
||||
* Specify your server-side environment variables schema here. This way you can ensure the app
|
||||
* isn't built with invalid env vars.
|
||||
*/
|
||||
server: {
|
||||
BACKEND_URL: z.string().url(),
|
||||
FRONTEND_URL: z.string().url(),
|
||||
APP_ID: z.string().min(1),
|
||||
APP_URL: z.string().url(),
|
||||
SENDER_DOMAIN: z.string().min(1),
|
||||
PARSE_KEY: z.string().min(1),
|
||||
NODE_ENV: z.enum(["development", "test", "production"]),
|
||||
OPENAI_API_KEY: z.string().min(1),
|
||||
SENDGRID_API_KEY: z.string().min(1),
|
||||
DATABASE_URL: z.string().url(),
|
||||
},
|
||||
});
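
As a usage note (not part of the diff): the `env` object exported above is what the routes and tools import, and validation runs when this module is first loaded, so a missing or malformed variable fails fast rather than at call time. A trivial illustration, assuming the same relative path the tools use:

```ts
import { env } from "../env.mjs";

// Typed and already validated: BACKEND_URL is guaranteed to be a URL string,
// NODE_ENV is narrowed to "development" | "test" | "production".
const bookingsUrl = `${env.BACKEND_URL}/bookings`;
```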
|
Binary file not shown (image removed, 125 KiB).
|
@ -1,121 +0,0 @@
|
|||
import { DynamicStructuredTool } from "langchain/tools";
|
||||
import { z } from "zod";
|
||||
|
||||
import type { UserList } from "~/src/types/user";
|
||||
|
||||
import { env } from "../env.mjs";
|
||||
|
||||
/**
|
||||
* Creates a booking for a user by event type, times, and timezone.
|
||||
*/
|
||||
const createBooking = async ({
|
||||
apiKey,
|
||||
userId,
|
||||
users,
|
||||
eventTypeId,
|
||||
start,
|
||||
end,
|
||||
timeZone,
|
||||
language,
|
||||
invite,
|
||||
}: {
|
||||
apiKey: string;
|
||||
userId: number;
|
||||
users: UserList;
|
||||
eventTypeId: number;
|
||||
start: string;
|
||||
end: string;
|
||||
timeZone: string;
|
||||
language: string;
|
||||
invite: number;
|
||||
title?: string;
|
||||
status?: string;
|
||||
}): Promise<string | Error | { error: string }> => {
|
||||
const params = {
|
||||
apiKey,
|
||||
userId: userId.toString(),
|
||||
};
|
||||
|
||||
const urlParams = new URLSearchParams(params);
|
||||
|
||||
const url = `${env.BACKEND_URL}/bookings?${urlParams.toString()}`;
|
||||
|
||||
const user = users.find((u) => u.id === invite);
|
||||
|
||||
if (!user) {
|
||||
return { error: `User with id ${invite} not found to invite` };
|
||||
}
|
||||
|
||||
const responses = {
|
||||
id: invite.toString(),
|
||||
name: user.username,
|
||||
email: user.email,
|
||||
};
|
||||
|
||||
const response = await fetch(url, {
|
||||
body: JSON.stringify({
|
||||
end,
|
||||
eventTypeId,
|
||||
language,
|
||||
metadata: {},
|
||||
responses,
|
||||
start,
|
||||
timeZone,
|
||||
}),
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
method: "POST",
|
||||
});
|
||||
|
||||
// Let GPT handle this. This will happen when a wrong event type id is used.
|
||||
// if (response.status === 401) throw new Error("Unauthorized");
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
if (response.status !== 200) {
|
||||
return {
|
||||
error: data.message,
|
||||
};
|
||||
}
|
||||
|
||||
return "Booking created";
|
||||
};
|
||||
|
||||
const createBookingTool = (apiKey: string, userId: number, users: UserList) => {
|
||||
return new DynamicStructuredTool({
|
||||
description: "Creates a booking on the primary user's calendar.",
|
||||
func: async ({ eventTypeId, start, end, timeZone, language, invite, title, status }) => {
|
||||
return JSON.stringify(
|
||||
await createBooking({
|
||||
apiKey,
|
||||
userId,
|
||||
users,
|
||||
end,
|
||||
eventTypeId,
|
||||
language,
|
||||
invite,
|
||||
start,
|
||||
status,
|
||||
timeZone,
|
||||
title,
|
||||
})
|
||||
);
|
||||
},
|
||||
name: "createBooking",
|
||||
schema: z.object({
|
||||
end: z
|
||||
.string()
|
||||
.describe("This should correspond to the event type's length, unless otherwise specified."),
|
||||
eventTypeId: z.number(),
|
||||
language: z.string(),
|
||||
invite: z.number().describe("External user id to invite."),
|
||||
start: z.string(),
|
||||
status: z.string().optional().describe("ACCEPTED, PENDING, CANCELLED or REJECTED"),
|
||||
timeZone: z.string(),
|
||||
title: z.string().optional(),
|
||||
}),
|
||||
});
|
||||
};
|
||||
|
||||
export default createBookingTool;
|
|
@ -1,66 +0,0 @@
|
|||
import { DynamicStructuredTool } from "langchain/tools";
|
||||
import { z } from "zod";
|
||||
|
||||
import { env } from "../env.mjs";
|
||||
|
||||
/**
|
||||
* Cancels a booking for a user by ID with reason.
|
||||
*/
|
||||
const cancelBooking = async ({
|
||||
apiKey,
|
||||
id,
|
||||
reason,
|
||||
}: {
|
||||
apiKey: string;
|
||||
id: string;
|
||||
reason: string;
|
||||
}): Promise<string | { error: string }> => {
|
||||
const params = {
|
||||
apiKey,
|
||||
};
|
||||
|
||||
const urlParams = new URLSearchParams(params);
|
||||
|
||||
const url = `${env.BACKEND_URL}/bookings/${id}/cancel?${urlParams.toString()}`;
|
||||
|
||||
const response = await fetch(url, {
|
||||
body: JSON.stringify({ reason }),
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
method: "DELETE",
|
||||
});
|
||||
|
||||
// Let GPT handle this. This will happen when a wrong booking id is used.
|
||||
// if (response.status === 401) throw new Error("Unauthorized");
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
if (response.status !== 200) {
|
||||
return { error: data.message };
|
||||
}
|
||||
|
||||
return "Booking cancelled";
|
||||
};
|
||||
|
||||
const cancelBookingTool = (apiKey: string) => {
|
||||
return new DynamicStructuredTool({
|
||||
description: "Cancel a booking",
|
||||
func: async ({ id, reason }) => {
|
||||
return JSON.stringify(
|
||||
await cancelBooking({
|
||||
apiKey,
|
||||
id,
|
||||
reason,
|
||||
})
|
||||
);
|
||||
},
|
||||
name: "cancelBooking",
|
||||
schema: z.object({
|
||||
id: z.string(),
|
||||
reason: z.string(),
|
||||
}),
|
||||
});
|
||||
};
|
||||
|
||||
export default cancelBookingTool;
|
|
@ -1,77 +0,0 @@
|
|||
import { DynamicStructuredTool } from "langchain/tools";
|
||||
import { z } from "zod";
|
||||
|
||||
import { env } from "../env.mjs";
|
||||
import type { Availability } from "../types/availability";
|
||||
|
||||
/**
|
||||
* Fetches availability for a user by date range and event type.
|
||||
*/
|
||||
export const fetchAvailability = async ({
|
||||
apiKey,
|
||||
userId,
|
||||
dateFrom,
|
||||
dateTo,
|
||||
}: {
|
||||
apiKey: string;
|
||||
userId: number;
|
||||
dateFrom: string;
|
||||
dateTo: string;
|
||||
}): Promise<Partial<Availability> | { error: string }> => {
|
||||
const params: { [k: string]: string } = {
|
||||
apiKey,
|
||||
userId: userId.toString(),
|
||||
dateFrom,
|
||||
dateTo,
|
||||
};
|
||||
|
||||
const urlParams = new URLSearchParams(params);
|
||||
|
||||
const url = `${env.BACKEND_URL}/availability?${urlParams.toString()}`;
|
||||
|
||||
const response = await fetch(url);
|
||||
|
||||
if (response.status === 401) throw new Error("Unauthorized");
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
if (response.status !== 200) {
|
||||
return { error: data.message };
|
||||
}
|
||||
|
||||
return {
|
||||
busy: data.busy,
|
||||
dateRanges: data.dateRanges,
|
||||
timeZone: data.timeZone,
|
||||
workingHours: data.workingHours,
|
||||
};
|
||||
};
|
||||
|
||||
const getAvailabilityTool = (apiKey: string) => {
|
||||
return new DynamicStructuredTool({
|
||||
description: "Get availability of users within range.",
|
||||
func: async ({ userIds, dateFrom, dateTo }) => {
|
||||
return JSON.stringify(
|
||||
await Promise.all(
|
||||
userIds.map(
|
||||
async (userId) =>
|
||||
await fetchAvailability({
|
||||
userId: userId,
|
||||
apiKey,
|
||||
dateFrom,
|
||||
dateTo,
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
},
|
||||
name: "getAvailability",
|
||||
schema: z.object({
|
||||
userIds: z.array(z.number()).describe("The users to fetch availability for."),
|
||||
dateFrom: z.string(),
|
||||
dateTo: z.string(),
|
||||
}),
|
||||
});
|
||||
};
|
||||
|
||||
export default getAvailabilityTool;
|
|
@ -1,75 +0,0 @@
|
|||
import { DynamicStructuredTool } from "langchain/tools";
|
||||
import { z } from "zod";
|
||||
|
||||
import { env } from "../env.mjs";
|
||||
import type { Booking } from "../types/booking";
|
||||
import { BOOKING_STATUS } from "../types/booking";
|
||||
|
||||
/**
|
||||
* Fetches bookings for a user by date range.
|
||||
*/
|
||||
const fetchBookings = async ({
|
||||
apiKey,
|
||||
userId,
|
||||
from,
|
||||
to,
|
||||
}: {
|
||||
apiKey: string;
|
||||
userId: number;
|
||||
from: string;
|
||||
to: string;
|
||||
}): Promise<Booking[] | { error: string }> => {
|
||||
const params = {
|
||||
apiKey,
|
||||
userId: userId.toString(),
|
||||
};
|
||||
|
||||
const urlParams = new URLSearchParams(params);
|
||||
|
||||
const url = `${env.BACKEND_URL}/bookings?${urlParams.toString()}`;
|
||||
|
||||
const response = await fetch(url);
|
||||
|
||||
if (response.status === 401) throw new Error("Unauthorized");
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
if (response.status !== 200) {
|
||||
return { error: data.message };
|
||||
}
|
||||
|
||||
const bookings: Booking[] = data.bookings
|
||||
.filter((booking: Booking) => {
|
||||
const afterFrom = new Date(booking.startTime).getTime() > new Date(from).getTime();
|
||||
const beforeTo = new Date(booking.endTime).getTime() < new Date(to).getTime();
|
||||
const notCancelled = booking.status !== BOOKING_STATUS.CANCELLED;
|
||||
|
||||
return afterFrom && beforeTo && notCancelled;
|
||||
})
|
||||
.map(({ endTime, eventTypeId, id, startTime, status, title }: Booking) => ({
|
||||
endTime,
|
||||
eventTypeId,
|
||||
id,
|
||||
startTime,
|
||||
status,
|
||||
title,
|
||||
}));
|
||||
|
||||
return bookings;
|
||||
};
|
||||
|
||||
const getBookingsTool = (apiKey: string, userId: number) => {
|
||||
return new DynamicStructuredTool({
|
||||
description: "Get bookings for the primary user between two dates.",
|
||||
func: async ({ from, to }) => {
|
||||
return JSON.stringify(await fetchBookings({ apiKey, userId, from, to }));
|
||||
},
|
||||
name: "getBookings",
|
||||
schema: z.object({
|
||||
from: z.string().describe("ISO 8601 datetime string"),
|
||||
to: z.string().describe("ISO 8601 datetime string"),
|
||||
}),
|
||||
});
|
||||
};
|
||||
|
||||
export default getBookingsTool;
|
|
@ -1,59 +0,0 @@
|
|||
import { DynamicStructuredTool } from "langchain/tools";
|
||||
import { z } from "zod";
|
||||
|
||||
import { env } from "../env.mjs";
|
||||
import type { EventType } from "../types/eventType";
|
||||
|
||||
/**
|
||||
* Fetches event types by user ID.
|
||||
*/
|
||||
export const fetchEventTypes = async ({ apiKey, userId }: { apiKey: string; userId?: number }) => {
|
||||
const params: Record<string, string> = {
|
||||
apiKey,
|
||||
};
|
||||
|
||||
if (userId) {
|
||||
params["userId"] = userId.toString();
|
||||
}
|
||||
|
||||
const urlParams = new URLSearchParams(params);
|
||||
|
||||
const url = `${env.BACKEND_URL}/event-types?${urlParams.toString()}`;
|
||||
|
||||
const response = await fetch(url);
|
||||
|
||||
if (response.status === 401) throw new Error("Unauthorized");
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
if (response.status !== 200) {
|
||||
return { error: data.message };
|
||||
}
|
||||
|
||||
return data.event_types.map((eventType: EventType) => ({
|
||||
id: eventType.id,
|
||||
slug: eventType.slug,
|
||||
length: eventType.length,
|
||||
title: eventType.title,
|
||||
}));
|
||||
};
|
||||
|
||||
const getEventTypesTool = (apiKey: string) => {
|
||||
return new DynamicStructuredTool({
|
||||
description: "Get a user's event type IDs. Usually necessary to book a meeting.",
|
||||
func: async ({ userId }) => {
|
||||
return JSON.stringify(
|
||||
await fetchEventTypes({
|
||||
apiKey,
|
||||
userId,
|
||||
})
|
||||
);
|
||||
},
|
||||
name: "getEventTypes",
|
||||
schema: z.object({
|
||||
userId: z.number().optional().describe("The user ID. Defaults to the primary user's ID."),
|
||||
}),
|
||||
});
|
||||
};
|
||||
|
||||
export default getEventTypesTool;
|
|
@ -1,124 +0,0 @@
|
|||
import { DynamicStructuredTool } from "langchain/tools";
|
||||
import { z } from "zod";
|
||||
|
||||
import { env } from "~/src/env.mjs";
|
||||
import type { User, UserList } from "~/src/types/user";
|
||||
import sendEmail from "~/src/utils/sendEmail";
|
||||
|
||||
export const sendBookingEmail = async ({
|
||||
user,
|
||||
agentEmail,
|
||||
subject,
|
||||
to,
|
||||
message,
|
||||
eventTypeSlug,
|
||||
slots,
|
||||
date,
|
||||
}: {
|
||||
apiKey: string;
|
||||
user: User;
|
||||
users: UserList;
|
||||
agentEmail: string;
|
||||
subject: string;
|
||||
to: string;
|
||||
message: string;
|
||||
eventTypeSlug: string;
|
||||
slots?: {
|
||||
time: string;
|
||||
text: string;
|
||||
}[];
|
||||
date: {
|
||||
date: string;
|
||||
text: string;
|
||||
};
|
||||
}) => {
|
||||
// const url = `${env.FRONTEND_URL}/${user.username}/${eventTypeSlug}?date=${date}`;
|
||||
const timeUrls = slots?.map(({ time, text }) => {
|
||||
return {
|
||||
url: `${env.FRONTEND_URL}/${user.username}/${eventTypeSlug}?slot=${time}`,
|
||||
text,
|
||||
};
|
||||
});
|
||||
|
||||
const dateUrl = {
|
||||
url: `${env.FRONTEND_URL}/${user.username}/${eventTypeSlug}?date=${date.date}`,
|
||||
text: date.text,
|
||||
};
|
||||
|
||||
await sendEmail({
|
||||
subject,
|
||||
to,
|
||||
cc: user.email,
|
||||
from: agentEmail,
|
||||
text: message
|
||||
.split("[[[Slots]]]")
|
||||
.join(timeUrls?.map(({ url, text }) => `${text}: ${url}`).join("\n"))
|
||||
.split("[[[Link]]]")
|
||||
.join(`${dateUrl.text}: ${dateUrl.url}`),
|
||||
html: message
|
||||
.split("\n")
|
||||
.join("<br>")
|
||||
.split("[[[Slots]]]")
|
||||
.join(timeUrls?.map(({ url, text }) => `<a href="${url}">${text}</a>`).join("<br>"))
|
||||
.split("[[[Link]]]")
|
||||
.join(`<a href="${dateUrl.url}">${dateUrl.text}</a>`),
|
||||
});
|
||||
|
||||
return "Booking link sent";
|
||||
};
|
||||
|
||||
const sendBookingEmailTool = (apiKey: string, user: User, users: UserList, agentEmail: string) => {
|
||||
return new DynamicStructuredTool({
|
||||
description:
|
||||
"Send a booking link via email. Useful for scheduling with non cal users. Be confident, suggesting a good date/time with a fallback to a link to select a date/time.",
|
||||
func: async ({ message, subject, to, eventTypeSlug, slots, date }) => {
|
||||
return JSON.stringify(
|
||||
await sendBookingEmail({
|
||||
apiKey,
|
||||
user,
|
||||
users,
|
||||
agentEmail,
|
||||
subject,
|
||||
to,
|
||||
message,
|
||||
eventTypeSlug,
|
||||
slots,
|
||||
date,
|
||||
})
|
||||
);
|
||||
},
|
||||
name: "sendBookingEmail",
|
||||
|
||||
schema: z.object({
|
||||
message: z
|
||||
.string()
|
||||
.describe(
|
||||
"A polite and professional email with an intro and signature at the end. Specify you are the AI booking assistant of the primary user. Use [[[Slots]]] and a fallback [[[Link]]] to inject good times and 'see all times' into messages"
|
||||
),
|
||||
subject: z.string(),
|
||||
to: z
|
||||
.string()
|
||||
.describe("email address to send the booking link to. Primary user is automatically CC'd"),
|
||||
eventTypeSlug: z.string().describe("the slug of the event type to book"),
|
||||
slots: z
|
||||
.array(
|
||||
z.object({
|
||||
time: z.string().describe("YYYY-MM-DDTHH:mm in UTC"),
|
||||
text: z.string().describe("minimum readable label. Ex. 4pm."),
|
||||
})
|
||||
)
|
||||
.optional()
|
||||
.describe("Time slots the external user can click"),
|
||||
date: z
|
||||
.object({
|
||||
date: z.string().describe("YYYY-MM-DD"),
|
||||
text: z.string().describe('"See all times" or similar'),
|
||||
})
|
||||
.describe(
|
||||
"A booking link that allows the external user to select a date / time. Should be a fallback to time slots"
|
||||
),
|
||||
}),
|
||||
});
|
||||
};
|
||||
|
||||
export default sendBookingEmailTool;
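
To make the `[[[Slots]]]` / `[[[Link]]]` convention concrete, here is an illustrative expansion of the text body; all values are hypothetical, and the replacement chain mirrors the `split(...).join(...)` calls in `sendBookingEmail` above:

```ts
// Hypothetical inputs, shaped like the tool's `slots` and `date` arguments.
const slots = [{ time: "2023-10-05T16:00", text: "4pm" }];
const date = { date: "2023-10-05", text: "See all times" };
const base = "https://cal.com/peer/30min"; // `${FRONTEND_URL}/${username}/${eventTypeSlug}`

const message = "Hi! Here are some times that work:\n[[[Slots]]]\nOr pick your own time below.\n[[[Link]]]";

const textBody = message
  .split("[[[Slots]]]")
  .join(slots.map(({ time, text }) => `${text}: ${base}?slot=${time}`).join("\n"))
  .split("[[[Link]]]")
  .join(`${date.text}: ${base}?date=${date.date}`);

// textBody now reads:
//   Hi! Here are some times that work:
//   4pm: https://cal.com/peer/30min?slot=2023-10-05T16:00
//   Or pick your own time below.
//   See all times: https://cal.com/peer/30min?date=2023-10-05
```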
|
|
@ -1,85 +0,0 @@
|
|||
import { DynamicStructuredTool } from "langchain/tools";
|
||||
import { z } from "zod";
|
||||
|
||||
import { env } from "../env.mjs";
|
||||
|
||||
/**
|
||||
* Edits a booking for a user by booking ID with new times, title, description, or status.
|
||||
*/
|
||||
const editBooking = async ({
|
||||
apiKey,
|
||||
userId,
|
||||
id,
|
||||
startTime, // In the docs it says start, but it's startTime: https://cal.com/docs/enterprise-features/api/api-reference/bookings#edit-an-existing-booking.
|
||||
endTime, // Same here: it says end but it's endTime.
|
||||
title,
|
||||
description,
|
||||
status,
|
||||
}: {
|
||||
apiKey: string;
|
||||
userId: number;
|
||||
id: string;
|
||||
startTime?: string;
|
||||
endTime?: string;
|
||||
title?: string;
|
||||
description?: string;
|
||||
status?: string;
|
||||
}): Promise<string | { error: string }> => {
|
||||
const params = {
|
||||
apiKey,
|
||||
userId: userId.toString(),
|
||||
};
|
||||
const urlParams = new URLSearchParams(params);
|
||||
|
||||
const url = `${env.BACKEND_URL}/bookings/${id}?${urlParams.toString()}`;
|
||||
|
||||
const response = await fetch(url, {
|
||||
body: JSON.stringify({ description, endTime, startTime, status, title }),
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
method: "PATCH",
|
||||
});
|
||||
|
||||
// Let GPT handle this. This will happen when a wrong booking id is used.
|
||||
// if (response.status === 401) throw new Error("Unauthorized");
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
if (response.status !== 200) {
|
||||
return { error: data.message };
|
||||
}
|
||||
|
||||
return "Booking edited";
|
||||
};
|
||||
|
||||
const editBookingTool = (apiKey: string, userId: number) => {
|
||||
return new DynamicStructuredTool({
|
||||
description: "Edit a booking",
|
||||
func: async ({ description, endTime, id, startTime, status, title }) => {
|
||||
return JSON.stringify(
|
||||
await editBooking({
|
||||
apiKey,
|
||||
userId,
|
||||
description,
|
||||
endTime,
|
||||
id,
|
||||
startTime,
|
||||
status,
|
||||
title,
|
||||
})
|
||||
);
|
||||
},
|
||||
name: "editBooking",
|
||||
schema: z.object({
|
||||
description: z.string().optional(),
|
||||
endTime: z.string().optional(),
|
||||
id: z.string(),
|
||||
startTime: z.string().optional(),
|
||||
status: z.string().optional(),
|
||||
title: z.string().optional(),
|
||||
}),
|
||||
});
|
||||
};
|
||||
|
||||
export default editBookingTool;
|
|
@ -1,25 +0,0 @@
|
|||
export type Availability = {
|
||||
busy: {
|
||||
start: string;
|
||||
end: string;
|
||||
title?: string;
|
||||
}[];
|
||||
timeZone: string;
|
||||
dateRanges: {
|
||||
start: string;
|
||||
end: string;
|
||||
}[];
|
||||
workingHours: {
|
||||
days: number[];
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
userId: number;
|
||||
}[];
|
||||
dateOverrides: {
|
||||
date: string;
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
userId: number;
|
||||
};
|
||||
currentSeats: number;
|
||||
};
|
|
@ -1,23 +0,0 @@
|
|||
export enum BOOKING_STATUS {
|
||||
ACCEPTED = "ACCEPTED",
|
||||
PENDING = "PENDING",
|
||||
CANCELLED = "CANCELLED",
|
||||
REJECTED = "REJECTED",
|
||||
}
|
||||
|
||||
export type Booking = {
|
||||
id: number;
|
||||
userId: number;
|
||||
description: string | null;
|
||||
eventTypeId: number;
|
||||
uid: string;
|
||||
title: string;
|
||||
startTime: string;
|
||||
endTime: string;
|
||||
attendees: { email: string; name: string; timeZone: string; locale: string }[] | null;
|
||||
user: { email: string; name: string; timeZone: string; locale: string }[] | null;
|
||||
payment: { id: number; success: boolean; paymentOption: string }[];
|
||||
metadata: object | null;
|
||||
status: BOOKING_STATUS;
|
||||
responses: { email: string; name: string; location: string } | null;
|
||||
};
|
|
@ -1,13 +0,0 @@
|
|||
export type EventType = {
|
||||
id: number;
|
||||
title: string;
|
||||
length: number;
|
||||
metadata: object;
|
||||
slug: string;
|
||||
hosts: {
|
||||
userId: number;
|
||||
isFixed: boolean;
|
||||
}[];
|
||||
hidden: boolean;
|
||||
// ...
|
||||
};
|
|
@ -1,18 +0,0 @@
|
|||
import type { EventType } from "./eventType";
|
||||
import type { WorkingHours } from "./workingHours";
|
||||
|
||||
export type User = {
|
||||
id: number;
|
||||
email: string;
|
||||
username: string;
|
||||
timeZone: string;
|
||||
eventTypes: EventType[];
|
||||
workingHours: WorkingHours[];
|
||||
};
|
||||
|
||||
export type UserList = {
|
||||
id?: number;
|
||||
email?: string;
|
||||
username?: string;
|
||||
type: "fromUsername" | "fromEmail";
|
||||
}[];
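
For context, `UserList` is the shape that `extractUsers` (not included in this diff) passes through to the agent for the @-mentions and email addresses found in an email. A plausible value, purely for illustration:

```ts
import type { UserList } from "~/src/types/user";

// Hypothetical example: one @username mention that resolved to a Cal.com user,
// plus one plain email address that did not.
const users: UserList = [
  { id: 42, username: "peer", email: "peer@cal.com", type: "fromUsername" },
  { email: "someone@gmail.com", type: "fromEmail" },
];
```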
|