diff --git a/.agents/skills/ship/SKILL.md b/.agents/skills/ship/SKILL.md new file mode 100644 index 00000000000..6f9e6322521 --- /dev/null +++ b/.agents/skills/ship/SKILL.md @@ -0,0 +1,82 @@ +--- +name: ship +description: Commit, push, and open a PR to staging in one shot +--- + +# Ship Command + +You help ship code by creating commits, pushing to the remote branch, and creating PRs in the user's voice. + +## Your Task + +When the user runs `/ship`: + +1. **Check git status** - See what files have changed +2. **Generate a commit message** following this format: `type(scope): description` + - Types: `fix`, `feat`, `improvement`, `chore` + - Scope: short identifier (e.g., `undo-redo`, `api`, `ui`) + - Keep it concise + +3. **Run lint** - Run `bun run lint` from the repo root to fix formatting issues before staging + +4. **Stage and commit** the changes with the generated message + +5. **Push to origin** using the current branch name + +6. **Create a PR** to staging with a description in the user's voice + +## Commit Message Format + +Based on the repo's commit history: +``` +fix(scope): description for bug fixes +feat(scope): description for new features +improvement(scope): description for enhancements +chore(scope): description for maintenance +``` + +## PR Description Format + +Use this exact template in the user's voice (concise, bullet points): + +```markdown +## Summary +- bullet point describing what changed +- another bullet point if needed + +## Type of Change +- [x] Bug fix (or appropriate type) + +## Testing +Tested manually (or describe testing) + +## Checklist +- [x] Code follows project style guidelines +- [x] Self-reviewed my changes +- [ ] Tests added/updated and passing +- [x] No new warnings introduced +- [x] I confirm that I have read and agree to the terms outlined in the [Contributor License Agreement (CLA)](./CONTRIBUTING.md#contributor-license-agreement-cla) +``` + +## PR Creation Command + +Use this command structure: +```bash +gh pr create 
--base staging --title "COMMIT_MESSAGE" --body "PR_BODY" +``` + +## Important Notes + +- Always confirm the commit message and PR description with the user before executing +- The PR should be created against `staging` branch +- Keep descriptions concise and in active voice +- Match the user's previous PR style: direct, no fluff, bullet points +- **DO NOT add "Co-Authored-By" lines to commits** - keep commit messages clean + +## User's Voice Characteristics (based on previous PRs) + +- Short, direct bullet points +- No unnecessary explanation +- "Tested manually" is acceptable for testing section +- Checkboxes filled in appropriately +- No screenshots section unless UI changes diff --git a/.claude/commands/ship.md b/.claude/commands/ship.md new file mode 100644 index 00000000000..6f848cc4a8f --- /dev/null +++ b/.claude/commands/ship.md @@ -0,0 +1,82 @@ +--- +description: Commit, push, and open a PR to staging in one shot +argument-hint: [optional context or scope notes] +--- + +# Ship Command + +You help ship code by creating commits, pushing to the remote branch, and creating PRs in the user's voice. + +## Your Task + +When the user runs `/ship`: + +1. **Check git status** - See what files have changed +2. **Generate a commit message** following this format: `type(scope): description` + - Types: `fix`, `feat`, `improvement`, `chore` + - Scope: short identifier (e.g., `undo-redo`, `api`, `ui`) + - Keep it concise + +3. **Run lint** - Run `bun run lint` from the repo root to fix formatting issues before staging + +4. **Stage and commit** the changes with the generated message + +5. **Push to origin** using the current branch name + +6. 
**Create a PR** to staging with a description in the user's voice + +## Commit Message Format + +Based on the repo's commit history: +``` +fix(scope): description for bug fixes +feat(scope): description for new features +improvement(scope): description for enhancements +chore(scope): description for maintenance +``` + +## PR Description Format + +Use this exact template in the user's voice (concise, bullet points): + +```markdown +## Summary +- bullet point describing what changed +- another bullet point if needed + +## Type of Change +- [x] Bug fix (or appropriate type) + +## Testing +Tested manually (or describe testing) + +## Checklist +- [x] Code follows project style guidelines +- [x] Self-reviewed my changes +- [ ] Tests added/updated and passing +- [x] No new warnings introduced +- [x] I confirm that I have read and agree to the terms outlined in the [Contributor License Agreement (CLA)](./CONTRIBUTING.md#contributor-license-agreement-cla) +``` + +## PR Creation Command + +Use this command structure: +```bash +gh pr create --base staging --title "COMMIT_MESSAGE" --body "PR_BODY" +``` + +## Important Notes + +- Always confirm the commit message and PR description with the user before executing +- The PR should be created against `staging` branch +- Keep descriptions concise and in active voice +- Match the user's previous PR style: direct, no fluff, bullet points +- **DO NOT add "Co-Authored-By" lines to commits** - keep commit messages clean + +## User's Voice Characteristics (based on previous PRs) + +- Short, direct bullet points +- No unnecessary explanation +- "Tested manually" is acceptable for testing section +- Checkboxes filled in appropriately +- No screenshots section unless UI changes diff --git a/.cursor/commands/ship.md b/.cursor/commands/ship.md new file mode 100644 index 00000000000..41855257c44 --- /dev/null +++ b/.cursor/commands/ship.md @@ -0,0 +1,77 @@ +# Ship Command + +You help ship code by creating commits, pushing to the remote branch, 
and creating PRs in the user's voice. + +## Your Task + +When the user runs `/ship`: + +1. **Check git status** - See what files have changed +2. **Generate a commit message** following this format: `type(scope): description` + - Types: `fix`, `feat`, `improvement`, `chore` + - Scope: short identifier (e.g., `undo-redo`, `api`, `ui`) + - Keep it concise + +3. **Run lint** - Run `bun run lint` from the repo root to fix formatting issues before staging + +4. **Stage and commit** the changes with the generated message + +5. **Push to origin** using the current branch name + +6. **Create a PR** to staging with a description in the user's voice + +## Commit Message Format + +Based on the repo's commit history: +``` +fix(scope): description for bug fixes +feat(scope): description for new features +improvement(scope): description for enhancements +chore(scope): description for maintenance +``` + +## PR Description Format + +Use this exact template in the user's voice (concise, bullet points): + +```markdown +## Summary +- bullet point describing what changed +- another bullet point if needed + +## Type of Change +- [x] Bug fix (or appropriate type) + +## Testing +Tested manually (or describe testing) + +## Checklist +- [x] Code follows project style guidelines +- [x] Self-reviewed my changes +- [ ] Tests added/updated and passing +- [x] No new warnings introduced +- [x] I confirm that I have read and agree to the terms outlined in the [Contributor License Agreement (CLA)](./CONTRIBUTING.md#contributor-license-agreement-cla) +``` + +## PR Creation Command + +Use this command structure: +```bash +gh pr create --base staging --title "COMMIT_MESSAGE" --body "PR_BODY" +``` + +## Important Notes + +- Always confirm the commit message and PR description with the user before executing +- The PR should be created against `staging` branch +- Keep descriptions concise and in active voice +- Match the user's previous PR style: direct, no fluff, bullet points +- **DO NOT add 
"Co-Authored-By" lines to commits** - keep commit messages clean + +## User's Voice Characteristics (based on previous PRs) + +- Short, direct bullet points +- No unnecessary explanation +- "Tested manually" is acceptable for testing section +- Checkboxes filled in appropriately +- No screenshots section unless UI changes diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4605c9227c0..c1dc73e648f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -70,7 +70,7 @@ jobs: uses: actions/checkout@v4 - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@v6 with: role-to-assume: ${{ secrets.DEV_AWS_ROLE_TO_ASSUME }} aws-region: ${{ secrets.DEV_AWS_REGION }} @@ -80,7 +80,7 @@ jobs: uses: aws-actions/amazon-ecr-login@v2 - name: Login to Docker Hub - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} @@ -135,7 +135,7 @@ jobs: uses: actions/checkout@v6 - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@v6 with: role-to-assume: ${{ github.ref == 'refs/heads/main' && secrets.AWS_ROLE_TO_ASSUME || secrets.STAGING_AWS_ROLE_TO_ASSUME }} aws-region: ${{ github.ref == 'refs/heads/main' && secrets.AWS_REGION || secrets.STAGING_AWS_REGION }} @@ -145,14 +145,14 @@ jobs: uses: aws-actions/amazon-ecr-login@v2 - name: Login to Docker Hub - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Login to GHCR if: github.ref == 'refs/heads/main' - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -234,7 +234,7 @@ jobs: uses: actions/checkout@v6 - name: Login to GHCR - uses: docker/login-action@v3 + uses: docker/login-action@v4 
with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -286,7 +286,7 @@ jobs: steps: - name: Login to GHCR - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: registry: ghcr.io username: ${{ github.repository_owner }} diff --git a/.github/workflows/docs-embeddings.yml b/.github/workflows/docs-embeddings.yml index c59380f19fb..13e2febbd31 100644 --- a/.github/workflows/docs-embeddings.yml +++ b/.github/workflows/docs-embeddings.yml @@ -23,12 +23,12 @@ jobs: bun-version: 1.3.13 - name: Setup Node - uses: actions/setup-node@v4 + uses: actions/setup-node@v6 with: node-version: latest - name: Cache Bun dependencies - uses: actions/cache@v4 + uses: actions/cache@v5 with: path: | ~/.bun/install/cache diff --git a/.github/workflows/i18n.yml b/.github/workflows/i18n.yml index d0ea236373f..5f7b1dd0016 100644 --- a/.github/workflows/i18n.yml +++ b/.github/workflows/i18n.yml @@ -26,7 +26,7 @@ jobs: bun-version: 1.3.13 - name: Cache Bun dependencies - uses: actions/cache@v4 + uses: actions/cache@v5 with: path: | ~/.bun/install/cache @@ -125,7 +125,7 @@ jobs: bun-version: 1.3.13 - name: Cache Bun dependencies - uses: actions/cache@v4 + uses: actions/cache@v5 with: path: | ~/.bun/install/cache diff --git a/.github/workflows/images.yml b/.github/workflows/images.yml index 853ebc6881a..f1ed176d350 100644 --- a/.github/workflows/images.yml +++ b/.github/workflows/images.yml @@ -34,7 +34,7 @@ jobs: uses: actions/checkout@v6 - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@v6 with: role-to-assume: ${{ github.ref == 'refs/heads/main' && secrets.AWS_ROLE_TO_ASSUME || github.ref == 'refs/heads/dev' && secrets.DEV_AWS_ROLE_TO_ASSUME || secrets.STAGING_AWS_ROLE_TO_ASSUME }} aws-region: ${{ github.ref == 'refs/heads/main' && secrets.AWS_REGION || github.ref == 'refs/heads/dev' && secrets.DEV_AWS_REGION || secrets.STAGING_AWS_REGION }} @@ -44,14 +44,14 @@ jobs: uses: 
aws-actions/amazon-ecr-login@v2 - name: Login to Docker Hub - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Login to GHCR if: github.ref == 'refs/heads/main' - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -120,7 +120,7 @@ jobs: uses: actions/checkout@v6 - name: Login to GHCR - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -160,7 +160,7 @@ jobs: steps: - name: Login to GHCR - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: registry: ghcr.io username: ${{ github.repository_owner }} diff --git a/.github/workflows/migrations.yml b/.github/workflows/migrations.yml index f1078e5aa21..daf41a0d984 100644 --- a/.github/workflows/migrations.yml +++ b/.github/workflows/migrations.yml @@ -22,7 +22,7 @@ jobs: bun-version: 1.3.13 - name: Cache Bun dependencies - uses: actions/cache@v4 + uses: actions/cache@v5 with: path: | ~/.bun/install/cache diff --git a/.github/workflows/publish-cli.yml b/.github/workflows/publish-cli.yml index 466ae0423bf..d7bae3e7828 100644 --- a/.github/workflows/publish-cli.yml +++ b/.github/workflows/publish-cli.yml @@ -22,13 +22,13 @@ jobs: bun-version: 1.3.13 - name: Setup Node.js for npm publishing - uses: actions/setup-node@v4 + uses: actions/setup-node@v6 with: node-version: '18' registry-url: 'https://registry.npmjs.org/' - name: Cache Bun dependencies - uses: actions/cache@v4 + uses: actions/cache@v5 with: path: | ~/.bun/install/cache diff --git a/.github/workflows/publish-ts-sdk.yml b/.github/workflows/publish-ts-sdk.yml index d8f95242b86..2a527b7b42a 100644 --- a/.github/workflows/publish-ts-sdk.yml +++ b/.github/workflows/publish-ts-sdk.yml @@ -22,13 +22,13 @@ jobs: bun-version: 1.3.13 - name: Setup Node.js for npm publishing - uses: 
actions/setup-node@v4 + uses: actions/setup-node@v6 with: node-version: '22' registry-url: 'https://registry.npmjs.org/' - name: Cache Bun dependencies - uses: actions/cache@v4 + uses: actions/cache@v5 with: path: | ~/.bun/install/cache diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 2164aebfa49..19c31f028a8 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -22,7 +22,7 @@ jobs: bun-version: 1.3.13 - name: Setup Node - uses: actions/setup-node@v4 + uses: actions/setup-node@v6 with: node-version: latest @@ -45,7 +45,7 @@ jobs: path: ./.turbo - name: Restore Next.js build cache - uses: actions/cache@v4 + uses: actions/cache@v5 with: path: ./apps/sim/.next/cache key: ${{ runner.os }}-nextjs-${{ hashFiles('bun.lock') }} diff --git a/apps/docs/components/icons.tsx b/apps/docs/components/icons.tsx index 35a34a66777..dae53828ccb 100644 --- a/apps/docs/components/icons.tsx +++ b/apps/docs/components/icons.tsx @@ -4045,6 +4045,7 @@ export function AsanaIcon(props: SVGProps) { } export function PipedriveIcon(props: SVGProps) { + const pathId = useId() return ( ) { ) { fillRule='evenodd' > - - - - + @@ -4098,6 +4096,40 @@ export function SalesforceIcon(props: SVGProps) { ) } +export function SapS4HanaIcon(props: SVGProps) { + const id = useId() + return ( + + + + + + + + + + + + + + ) +} + export function ServiceNowIcon(props: SVGProps) { return ( @@ -4694,15 +4726,16 @@ export function DynamoDBIcon(props: SVGProps) { } export function IAMIcon(props: SVGProps) { + const id = useId() return ( - + - + ) { } export function IdentityCenterIcon(props: SVGProps) { + const id = useId() return ( - + - + ) { } export function STSIcon(props: SVGProps) { + const id = useId() return ( - + - + ) { } export function SESIcon(props: SVGProps) { + const id = useId() return ( - + - + ) { } export function SecretsManagerIcon(props: SVGProps) { + const id = useId() return ( - + - + = { rootly: RootlyIcon, s3: S3Icon, 
salesforce: SalesforceIcon, + sap_s4hana: SapS4HanaIcon, search: SearchIcon, secrets_manager: SecretsManagerIcon, sendgrid: SendgridIcon, diff --git a/apps/docs/components/ui/video.tsx b/apps/docs/components/ui/video.tsx index 43e68e1c0c6..9e0e7f1189b 100644 --- a/apps/docs/components/ui/video.tsx +++ b/apps/docs/components/ui/video.tsx @@ -50,7 +50,7 @@ export function Video({ height={height} className={cn( className, - enableLightbox && 'cursor-pointer transition-opacity hover:opacity-95' + enableLightbox && 'cursor-pointer transition-opacity hover:opacity-[0.97]' )} src={getAssetUrl(src)} onClick={handleVideoClick} diff --git a/apps/docs/content/docs/en/credentials/index.mdx b/apps/docs/content/docs/en/credentials/index.mdx index b5bd02b6d1f..5ad99772a29 100644 --- a/apps/docs/content/docs/en/credentials/index.mdx +++ b/apps/docs/content/docs/en/credentials/index.mdx @@ -25,6 +25,8 @@ Secrets are organized into two sections: - **Workspace** — shared with all members of your workspace - **Personal** — private to you +External workspace members count as workspace members for workspace-scoped secrets. They can use workspace secrets according to their workspace permission level, even though they are not members of your organization. + ### Adding a Secret Type a key name (e.g. `OPENAI_API_KEY`) into the **Key** column and its value into the **Value** column in the last empty row. A new empty row appears automatically as you type. Existing values are masked by default. @@ -89,7 +91,7 @@ Click **Save** to apply changes, or **Back** to return to the list. 
| | Workspace | Personal | |---|---|---| -| **Visibility** | All workspace members | Only you | +| **Visibility** | All workspace members, including external workspace members | Only you | | **Use in workflows** | Any member can use | Only you can use | | **Best for** | Production workflows, shared services | Testing, personal API keys | | **Who can edit** | Workspace admins | Only you | diff --git a/apps/docs/content/docs/en/enterprise/access-control.mdx b/apps/docs/content/docs/en/enterprise/access-control.mdx index 0034a296aef..38aa1ec8dac 100644 --- a/apps/docs/content/docs/en/enterprise/access-control.mdx +++ b/apps/docs/content/docs/en/enterprise/access-control.mdx @@ -130,6 +130,8 @@ Controls visibility of platform features and modules. Open the group's **Details** view and add members by searching for users by name or email. Only users who already have workspace-level access can be added. A user can only belong to one group per workspace — adding a user to a new group within the same workspace removes them from their current group for that workspace. +External workspace members are treated like other workspace members for access-control purposes. They can be assigned to permission groups in any workspace they have access to, but they do not become organization members or appear in the organization roster. + --- ## Enforcement @@ -159,6 +161,7 @@ When a user opens Mothership, their permission group is read before any block or - Moving a user to a new group within a workspace automatically removes them from their previous group in that workspace. - Users not assigned to any group in a workspace have no restrictions applied in that workspace (all blocks, providers, and features are available to them there). - If **Auto-add new members** is enabled on a group, new members of that workspace are automatically placed in the group. Only one group per workspace can have this setting active. 
+- External workspace members follow the same per-workspace permission group rules as internal members. --- diff --git a/apps/docs/content/docs/en/enterprise/audit-logs.mdx b/apps/docs/content/docs/en/enterprise/audit-logs.mdx index 92ec41a489e..9bcf9dfb0ed 100644 --- a/apps/docs/content/docs/en/enterprise/audit-logs.mdx +++ b/apps/docs/content/docs/en/enterprise/audit-logs.mdx @@ -44,7 +44,7 @@ Authorization: Bearer | `resourceType` | string | Filter by resource type (e.g. `workflow`) | | `resourceId` | string | Filter by a specific resource ID | | `workspaceId` | string | Filter by workspace | -| `actorId` | string | Filter by user ID (must be an org member) | +| `actorId` | string | Filter by user ID. For organization-wide filters, the actor must be a current or former org member; workspace-scoped logs can also include external workspace members. | | `startDate` | string | ISO 8601 date — return logs on or after this date | | `endDate` | string | ISO 8601 date — return logs on or before this date | | `includeDeparted` | boolean | Include logs from members who have since left the organization (default `false`) | @@ -98,6 +98,8 @@ Audit log events follow a `resource.action` naming pattern. The table below list | **Credentials** | `credential.created`, `credential.deleted`, `oauth.disconnected` | | **Organization** | `organization.updated`, `org_member.added`, `org_member.role_changed` | +Workspace invitation events include whether the invite is for an internal organization member or an external workspace member in their metadata. External workspace members can appear as actors on workspace-scoped events, but they are not organization members and do not appear in the organization roster. 
+ --- diff --git a/apps/docs/content/docs/en/enterprise/index.mdx b/apps/docs/content/docs/en/enterprise/index.mdx index 1c01d7872fa..e4f004c62b7 100644 --- a/apps/docs/content/docs/en/enterprise/index.mdx +++ b/apps/docs/content/docs/en/enterprise/index.mdx @@ -13,6 +13,8 @@ Sim Enterprise provides advanced features for organizations with enhanced securi Define permission groups on a workspace to control what features and integrations its members can use. Permission groups are scoped to a single workspace — a user can belong to different groups (or no group) in different workspaces. +External workspace members can be assigned to permission groups just like internal organization members, but they remain outside the organization roster and do not consume seats. + ### Features - **Allowed Model Providers** - Restrict which AI providers users can access (OpenAI, Anthropic, Google, etc.) @@ -81,4 +83,4 @@ Self-hosted deployments enable enterprise features via environment variables ins | `INBOX_ENABLED`, `NEXT_PUBLIC_INBOX_ENABLED` | Sim Mailer inbox | | `DISABLE_INVITATIONS`, `NEXT_PUBLIC_DISABLE_INVITATIONS` | Disable invitations; manage membership via Admin API | -Once enabled, each feature is configured through the same Settings UI as Sim Cloud. When invitations are disabled, use the Admin API (`x-admin-key` header) to manage organization and workspace membership. +Once enabled, each feature is configured through the same Settings UI as Sim Cloud. When invitations are disabled, use the Admin API (`x-admin-key` header) to manage organization membership and workspace access. Internal members join the organization; external workspace members only receive access to a specific workspace. 
diff --git a/apps/docs/content/docs/en/enterprise/sso.mdx b/apps/docs/content/docs/en/enterprise/sso.mdx index ca04c0b9185..bfccc204122 100644 --- a/apps/docs/content/docs/en/enterprise/sso.mdx +++ b/apps/docs/content/docs/en/enterprise/sso.mdx @@ -221,6 +221,8 @@ Once SSO is configured, users with your domain (`company.com`) can sign in throu Users who sign in via SSO for the first time are automatically provisioned and added to your organization — no manual invite required. +SSO provisioning creates internal organization members. External workspace members are different: they are invited to a specific workspace without joining your organization or consuming one of your seats. + Password-based login remains available. Forcing all organization members to use SSO exclusively is not yet supported. @@ -242,7 +244,7 @@ Users who sign in via SSO for the first time are automatically provisioned and a }, { question: "What happens when a user signs in with SSO for the first time?", - answer: "Sim creates an account for them automatically and adds them to your organization. No manual invite is needed. They are assigned the member role by default." + answer: "Sim creates an account for them automatically and adds them to your organization. No manual invite is needed. They are assigned the member role by default. External workspace members are not provisioned through SSO into your organization; they are invited directly to a workspace and remain outside your org roster." }, { question: "Can I still use email/password login after enabling SSO?", diff --git a/apps/docs/content/docs/en/execution/costs.mdx b/apps/docs/content/docs/en/execution/costs.mdx index 3028a79f983..a08b8747901 100644 --- a/apps/docs/content/docs/en/execution/costs.mdx +++ b/apps/docs/content/docs/en/execution/costs.mdx @@ -272,6 +272,8 @@ Sim has two paid plan tiers - **Pro** and **Max**. 
Either can be used individual To use Pro or Max with a team, select **Get For Team** in subscription settings and choose the tier and number of seats. Credits are pooled across the organization at the per-seat rate (e.g. Max for Teams with 3 seats = 75,000 credits/mo pooled). +Internal organization members use seats and contribute to the team's pooled credit allocation. External workspace members do not join your organization, do not appear in the organization roster, and do not count toward your seat total. + ### Daily Refresh Credits Paid plans include a small daily credit allowance that does not count toward your plan limit. Each day, usage up to the daily refresh amount is excluded from billable usage. This allowance resets every 24 hours and does not carry over - use it or lose it. @@ -317,7 +319,7 @@ By default, your usage is capped at the credits included in your plan. To allow | **Max** | Up to 10 | — | | **Team / Enterprise** | Unlimited | Unlimited | -Team and Enterprise plans unlock shared workspaces that belong to your organization. Members invited to a shared workspace automatically join the organization and count toward your seat total. When a Team or Enterprise subscription is cancelled or downgraded, existing shared workspaces remain accessible to current members but new invites are disabled until the organization is upgraded again. +Team and Enterprise plans unlock shared workspaces that belong to your organization. Internal members invited to a shared workspace join the organization and count toward your seat total. Existing Sim users who already belong to another organization can be added as external workspace members; they get workspace access without joining your organization or using one of your seats. When a Team or Enterprise subscription is cancelled or downgraded, existing shared workspaces remain accessible to current members but new invites are disabled until the organization is upgraded again. 
### Rate Limits @@ -368,7 +370,8 @@ Sim uses a **base subscription + overage** billing model: - Example: 7,000 credits used = $25 (subscription) + $5 (overage for 1,000 extra credits at $0.005/credit) **Team Plans:** -- Usage is pooled across all team members in the organization +- Usage is pooled across internal team members in the organization +- External workspace members keep their own organization or personal billing context for runs where they are the billing actor - Overage is calculated from total team usage against the pooled limit - Organization owner receives one bill diff --git a/apps/docs/content/docs/en/mailer/index.mdx b/apps/docs/content/docs/en/mailer/index.mdx index e21f979f285..420e481d688 100644 --- a/apps/docs/content/docs/en/mailer/index.mdx +++ b/apps/docs/content/docs/en/mailer/index.mdx @@ -42,6 +42,8 @@ Only authorized senders can create tasks. Emails from anyone else are automatica - **Workspace members** are allowed by default — no setup needed - **External senders** can be added manually with an optional label for easy identification +External senders are email addresses that can create inbox tasks. They are not the same as external workspace members, who have workspace access in Sim without joining your organization. + Manage your allowed senders list in **Settings** → **Inbox** → **Allowed Senders**. ## Tracking Tasks diff --git a/apps/docs/content/docs/en/permissions/roles-and-permissions.mdx b/apps/docs/content/docs/en/permissions/roles-and-permissions.mdx index fb048651eab..28cfe8df87e 100644 --- a/apps/docs/content/docs/en/permissions/roles-and-permissions.mdx +++ b/apps/docs/content/docs/en/permissions/roles-and-permissions.mdx @@ -12,7 +12,7 @@ When you invite team members to your organization or workspace, you'll need to c Sim has two kinds of workspaces: - **Personal workspaces** live under your individual account. The number you can create depends on your plan. 
-- **Shared (organization) workspaces** live under an organization and are available on Team and Enterprise plans. Any organization Owner or Admin can create them. Members invited to a shared workspace automatically join the organization and count toward your seat total. +- **Shared (organization) workspaces** live under an organization and are available on Team and Enterprise plans. Any organization Owner or Admin can create them. Internal members invited to a shared workspace join the organization and count toward your seat total. Existing Sim users who already belong to another organization can be added as external workspace members instead, giving them access to the workspace without adding them to your organization roster or using one of your seats. ### Workspace Limits by Plan @@ -43,6 +43,15 @@ When inviting someone to a workspace, you can assign one of three permission lev | **Write** | Create and edit workflows, run workflows, manage environment variables | | **Admin** | Everything Write can do, plus invite/remove users and manage workspace settings | +## Internal Members vs External Workspace Members + +Workspace permissions are separate from organization membership: + +- **Internal organization members** belong to your organization, appear in the organization roster, and count toward your seat total. Invite new teammates this way when they should be part of your company or team in Sim. +- **External workspace members** have access only to the workspace they are invited to. They keep their own organization membership, do not appear in your organization roster, and do not count toward your organization's seats. Use external access for clients, partners, contractors, or collaborators who already use Sim in another organization. + +External workspace members still receive a workspace permission level — Read, Write, or Admin — and that permission controls what they can do inside the workspace. 
+ ## What Each Permission Level Can Do Here's a detailed breakdown of what users can do with each permission level: @@ -126,7 +135,7 @@ Every workspace has one **Owner** (the person who created it) plus any number of 2. **Workspace level**: Give them **Admin** permission so they can manage the team and see everything ### Adding a Stakeholder or Client -1. **Organization level**: Invite them as an **Organization Member** +1. **Organization level**: If they should not join your organization, add them as an **External workspace member** 2. **Workspace level**: Give them **Read** permission so they can see progress but not make changes --- @@ -199,12 +208,12 @@ An organization has three roles: **Owner**, **Admin**, and **Member**. import { FAQ } from '@/components/ui/faq' \ No newline at end of file diff --git a/apps/docs/content/docs/en/quick-reference/index.mdx b/apps/docs/content/docs/en/quick-reference/index.mdx index 1831918b835..f44ab7da0a8 100644 --- a/apps/docs/content/docs/en/quick-reference/index.mdx +++ b/apps/docs/content/docs/en/quick-reference/index.mdx @@ -31,7 +31,7 @@ A quick lookup for everyday actions in the Sim workflow editor. For keyboard sho
Invite team members - Sidebar → **Invite** + Sidebar → **Invite**. Internal invites join the organization; external workspace members get workspace access only.
diff --git a/apps/docs/content/docs/en/tools/browser_use.mdx b/apps/docs/content/docs/en/tools/browser_use.mdx index c8e5df7ec5e..26c1bc1e503 100644 --- a/apps/docs/content/docs/en/tools/browser_use.mdx +++ b/apps/docs/content/docs/en/tools/browser_use.mdx @@ -42,9 +42,18 @@ Runs a browser automation task using BrowserUse | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | | `task` | string | Yes | What should the browser agent do | -| `variables` | json | No | Optional variables to use as secrets \(format: \{key: value\}\) | -| `save_browser_data` | boolean | No | Whether to save browser data | -| `model` | string | No | LLM model to use \(default: gpt-4o\) | +| `startUrl` | string | No | Initial page URL to start the agent on \(reduces navigation steps\) | +| `variables` | json | No | Optional secrets injected into the task \(format: \{key: value\}\) | +| `allowedDomains` | string | No | Comma-separated list of domains the agent is allowed to visit | +| `maxSteps` | number | No | Maximum number of steps the agent may take \(default 100, max 10000\) | +| `flashMode` | boolean | No | Enable flash mode \(faster, less careful navigation\) | +| `thinking` | boolean | No | Enable extended reasoning mode | +| `vision` | string | No | Vision capability: "true", "false", or "auto" | +| `systemPromptExtension` | string | No | Optional text appended to the agent system prompt \(max 2000 chars\) | +| `structuredOutput` | string | No | Stringified JSON schema for the structured output | +| `highlightElements` | boolean | No | Highlight interactive elements on the page \(default true\) | +| `metadata` | json | No | Custom key-value metadata \(up to 10 pairs\) for tracking | +| `model` | string | No | LLM model identifier \(e.g. 
browser-use-2.0\) | | `apiKey` | string | Yes | API key for BrowserUse API | | `profile_id` | string | No | Browser profile ID for persistent sessions \(cookies, login state\) | @@ -54,7 +63,18 @@ Runs a browser automation task using BrowserUse | --------- | ---- | ----------- | | `id` | string | Task execution identifier | | `success` | boolean | Task completion status | -| `output` | json | Task output data | -| `steps` | json | Execution steps taken | +| `output` | json | Final task output \(string or structured\) | +| `steps` | array | Steps the agent executed \(number, memory, nextGoal, url, actions, duration\) | +| ↳ `number` | number | Sequential step number | +| ↳ `memory` | string | Agent memory at this step | +| ↳ `evaluationPreviousGoal` | string | Evaluation of previous goal completion | +| ↳ `nextGoal` | string | Goal for the next step | +| ↳ `url` | string | Current URL of the browser | +| ↳ `screenshotUrl` | string | Optional screenshot URL | +| ↳ `actions` | array | Stringified JSON actions performed | +| ↳ `duration` | number | Step duration in seconds | +| `liveUrl` | string | Embeddable live browser session URL \(active during execution\) | +| `shareUrl` | string | Public shareable URL for the recorded session \(post-run\) | +| `sessionId` | string | Browser Use session identifier | diff --git a/apps/docs/content/docs/en/tools/meta.json b/apps/docs/content/docs/en/tools/meta.json index f7ce46bd766..1f780cff3d2 100644 --- a/apps/docs/content/docs/en/tools/meta.json +++ b/apps/docs/content/docs/en/tools/meta.json @@ -150,6 +150,7 @@ "rootly", "s3", "salesforce", + "sap_s4hana", "search", "secrets_manager", "sendgrid", diff --git a/apps/docs/content/docs/en/tools/sap_s4hana.mdx b/apps/docs/content/docs/en/tools/sap_s4hana.mdx new file mode 100644 index 00000000000..0c8aaf5c745 --- /dev/null +++ b/apps/docs/content/docs/en/tools/sap_s4hana.mdx @@ -0,0 +1,1212 @@ +--- +title: SAP S4HANA +description: Read and write SAP S4HANA Cloud business data via 
OData +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +{/* MANUAL-CONTENT-START:intro */} +[SAP S4HANA](https://www.sap.com/products/erp/s4hana.html) is SAP's flagship intelligent ERP suite, running on the in-memory HANA database. It powers finance, supply chain, procurement, sales, and manufacturing for organizations of every size, and exposes its business data through a broad catalog of OData services on SAP Business Technology Platform (BTP). + +With SAP S4HANA, you can: + +- **Run core business processes**: Manage finance, procurement, sales, logistics, inventory, and manufacturing on a single source of truth. +- **Model master data at scale**: Maintain business partners, customers, suppliers, products, and organizational structures across multiple company codes, sales organizations, and plants. +- **Execute transactional flows end to end**: Create and update sales orders, purchase requisitions, purchase orders, deliveries, billing documents, supplier invoices, and stock movements with full audit trails. +- **Govern access cleanly**: Use Communication Arrangements, Communication Systems, and Communication Scopes to scope OAuth client credentials to exactly the services each integration needs. +- **Integrate via standard OData**: Every entity supported here speaks OData v2 with consistent paging, filtering, expansion, and ETag-based optimistic concurrency. + +In Sim, the SAP S4HANA integration lets your agents read and write directly against your tenant's OData services using per-tenant OAuth 2.0 client credentials. Agents can list and fetch master data, create and update transactional documents, run stock and material document queries, and execute arbitrary OData v2 calls against any whitelisted Communication Scenario — all routed through a single internal proxy that handles token acquisition, CSRF fetch-and-retry, and OData error normalization. 
Use it to automate order-to-cash, procure-to-pay, and inventory workflows, keep SAP in sync with the rest of your stack, or trigger downstream agent logic from SAP business events. +{/* MANUAL-CONTENT-END */} + + +## Usage Instructions + +{/* MANUAL-CONTENT-START:usage */} +Connect any SAP S4HANA tenant — **Cloud Public Edition**, **Cloud Private Edition (RISE)**, or **on-premise** — and read or write business data through the official OData v2 services. Each tool routes through a single internal proxy that handles token acquisition, CSRF fetch-and-retry for write operations, and OData error normalization. + +### Deployment modes + +Pick the deployment that matches your tenant in the **Deployment** dropdown: + +- **S4HANA Cloud Public Edition** — provide your **BTP subaccount subdomain** and **region** (e.g., `eu10`, `us10`). The host is derived automatically as `{subdomain}-api.s4hana.ondemand.com`, and OAuth tokens are fetched from the matching BTP UAA endpoint. Authentication is OAuth 2.0 client credentials configured in a Communication Arrangement. +- **S4HANA Cloud Private Edition (RISE)** — provide your **OData Base URL** (e.g., `https://my-tenant.s4hana.cloud.sap`). Authenticate with **OAuth 2.0 client credentials** (provide the tenant's UAA `tokenUrl`, `clientId`, `clientSecret`) or **HTTP Basic** with a Communication User (`username`, `password`). +- **On-premise S4HANA** — provide your **OData Base URL** (e.g., `https://sap.internal.company.com:44300`). Authenticate with **OAuth 2.0 client credentials** issued by your on-prem identity provider, or **HTTP Basic** with a service user. + +### What you can do + +Read and create business partners, customers, suppliers, sales orders, deliveries (inbound/outbound), billing documents, products, stock and material documents, purchase requisitions, purchase orders, and supplier invoices. Update business partners, customers, suppliers, products, sales orders, purchase orders, and purchase requisitions with PATCH. 
Run arbitrary OData v2 queries against any whitelisted Communication Scenario or registered service. + +### Optimistic concurrency + +All update tools accept an optional `ifMatch` ETag. When omitted, `If-Match` defaults to a wildcard (unconditional). For safe concurrent updates, fetch the entity first, capture its ETag from the response, and pass it as `ifMatch` to detect lost updates. +{/* MANUAL-CONTENT-END */} + + +Connect SAP S4HANA Cloud Public Edition with per-tenant OAuth 2.0 client credentials configured in your Communication Arrangements. Read and create business partners, customers, suppliers, sales orders, deliveries (inbound/outbound), billing documents, products, stock and material documents, purchase requisitions, purchase orders, and supplier invoices, or run arbitrary OData v2 queries against any whitelisted Communication Scenario. + + + +## Tools + +### `sap_s4hana_list_business_partners` + +List business partners from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_BusinessPartner) with optional OData $filter, $top, $skip, $orderby, $select, $expand. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `filter` | string | No | OData $filter expression \(e.g., "BusinessPartnerCategory eq \'1\'"\) | +| `top` | number | No | Maximum results to return \($top\) | +| `skip` | number | No | Number of results to skip \($skip\) | +| `orderBy` | string | No | OData $orderby expression | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \($expand\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Array of A_BusinessPartner entities | + +### `sap_s4hana_get_business_partner` + +Retrieve a single business partner by BusinessPartner key from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_BusinessPartner). + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `businessPartner` | string | Yes | BusinessPartner key \(string, up to 10 characters\) | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \($expand\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | A_BusinessPartner entity | + +### `sap_s4hana_create_business_partner` + +Create a business partner in SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_BusinessPartner). For Person category 1 provide FirstName and LastName. For Organization category 2 provide OrganizationBPName1. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `businessPartnerCategory` | string | Yes | BusinessPartnerCategory: "1" Person, "2" Organization, "3" Group | +| `businessPartnerGrouping` | string | Yes | BusinessPartnerGrouping \(number range / role grouping configured in S/4HANA, e.g. "0001"\) | +| `firstName` | string | No | FirstName \(required for Person\) | +| `lastName` | string | No | LastName \(required for Person\) | +| `organizationBPName1` | string | No | OrganizationBPName1 \(required for Organization\) | +| `body` | json | No | Optional additional A_BusinessPartner fields merged into the create payload | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Created A_BusinessPartner entity | + +### `sap_s4hana_update_business_partner` + +Update fields on an A_BusinessPartner entity in SAP S/4HANA Cloud (API_BUSINESS_PARTNER). PATCH only sends the fields you provide; existing values are preserved. If-Match defaults to a wildcard (unconditional) — for safe concurrent updates pass the ETag from a prior GET to avoid lost updates. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `businessPartner` | string | Yes | BusinessPartner key to update \(string, up to 10 characters\) | +| `body` | json | Yes | JSON object with A_BusinessPartner fields to update \(e.g., \{"FirstName":"Jane","SearchTerm1":"VIP"\}\) | +| `ifMatch` | string | No | If-Match ETag for optimistic concurrency. Defaults to "*" \(unconditional\). | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP \(204 on success\) | +| `data` | json | Null on 204 success, or updated A_BusinessPartner entity if SAP returns one | + +### `sap_s4hana_list_customers` + +List customers from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_Customer) with optional OData $filter, $top, $skip, $orderby, $select, $expand. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `filter` | string | No | OData $filter expression \(e.g., "CustomerAccountGroup eq \'Z001\'"\) | +| `top` | number | No | Maximum results to return \($top\) | +| `skip` | number | No | Number of results to skip \($skip\) | +| `orderBy` | string | No | OData $orderby expression | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_CustomerCompany,to_CustomerSalesArea"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Array of A_Customer entities | + +### `sap_s4hana_get_customer` + +Retrieve a single customer by Customer key from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_Customer). 
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) |
+| `region` | string | Yes | BTP region \(e.g. eu10, us10\) |
+| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement |
+| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement |
+| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise |
+| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic |
+| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) |
+| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) |
+| `username` | string | No | Username for HTTP Basic auth |
+| `password` | string | No | Password for HTTP Basic auth |
+| `customer` | string | Yes | Customer key \(string, up to 10 characters\) |
+| `select` | string | No | Comma-separated fields to return \($select\) |
+| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_CustomerCompany,to_CustomerSalesArea"\) |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `status` | number | HTTP status code returned by SAP |
+| `data` | json | A_Customer entity |
+
+### `sap_s4hana_update_customer`
+
+Update fields on an A_Customer entity in SAP S/4HANA Cloud (API_BUSINESS_PARTNER). PATCH only sends the fields you provide; existing values are preserved. A_Customer PATCH is limited to modifiable fields such as OrderIsBlockedForCustomer, DeliveryIsBlocked, BillingIsBlockedForCustomer, PostingIsBlocked, and DeletionIndicator. If-Match defaults to a wildcard — for safe concurrent updates pass the ETag from a prior GET to avoid lost updates. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `customer` | string | Yes | Customer key to update \(string, up to 10 characters\) | +| `body` | json | Yes | JSON object with A_Customer fields to update \(e.g., \{"OrderIsBlockedForCustomer":true,"DeletionIndicator":false\}\) | +| `ifMatch` | string | No | If-Match ETag for optimistic concurrency. Defaults to "*" \(unconditional\). | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP \(204 on success\) | +| `data` | json | Null on 204 success, or updated A_Customer entity if SAP returns one | + +### `sap_s4hana_list_suppliers` + +List suppliers from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_Supplier) with optional OData $filter, $top, $skip, $orderby, $select, $expand. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `filter` | string | No | OData $filter expression \(e.g., "SupplierAccountGroup eq \'BP02\'"\) | +| `top` | number | No | Maximum results to return \($top\) | +| `skip` | number | No | Number of results to skip \($skip\) | +| `orderBy` | string | No | OData $orderby expression | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_SupplierCompany,to_SupplierPurchasingOrg"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Array of A_Supplier entities | + +### `sap_s4hana_get_supplier` + +Retrieve a single supplier by Supplier key from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_Supplier). 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `supplier` | string | Yes | Supplier key \(string, up to 10 characters\) | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_SupplierCompany,to_SupplierPurchasingOrg"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | A_Supplier entity | + +### `sap_s4hana_update_supplier` + +Update fields on an A_Supplier entity in SAP S/4HANA Cloud (API_BUSINESS_PARTNER). PATCH only sends the fields you provide; existing values are preserved. A_Supplier PATCH is limited to modifiable fields such as PostingIsBlocked, PurchasingIsBlocked, PaymentIsBlockedForSupplier, DeletionIndicator, and SupplierAccountGroup. If-Match defaults to a wildcard - for safe concurrent updates pass the ETag from a prior GET to avoid lost updates. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `supplier` | string | Yes | Supplier key to update \(string, up to 10 characters\) | +| `body` | json | Yes | JSON object with A_Supplier fields to update \(e.g., \{"PaymentIsBlockedForSupplier":true,"PostingIsBlocked":true\}\) | +| `ifMatch` | string | No | If-Match ETag for optimistic concurrency. Defaults to "*" \(unconditional\). | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP \(204 on success\) | +| `data` | json | Null on 204 success, or updated A_Supplier entity if SAP returns one | + +### `sap_s4hana_list_sales_orders` + +List sales orders from SAP S/4HANA Cloud (API_SALES_ORDER_SRV, A_SalesOrder) with optional OData $filter, $top, $skip, $orderby, $select, $expand. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `filter` | string | No | OData $filter expression \(e.g., "SalesOrganization eq \'1010\'"\) | +| `top` | number | No | Maximum results to return \($top\) | +| `skip` | number | No | Number of results to skip \($skip\) | +| `orderBy` | string | No | OData $orderby expression | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_Item,to_Partner"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Array of A_SalesOrder entities | + +### `sap_s4hana_get_sales_order` + +Retrieve a single sales order by SalesOrder key from SAP S/4HANA Cloud (API_SALES_ORDER_SRV, A_SalesOrder). 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `salesOrder` | string | Yes | SalesOrder key \(string, up to 10 characters\) | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_Item"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | A_SalesOrder entity | + +### `sap_s4hana_create_sales_order` + +Create a sales order in SAP S/4HANA Cloud (API_SALES_ORDER_SRV, A_SalesOrder) with deep insert of sales order items via to_Item. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `salesOrderType` | string | Yes | SalesOrderType \(e.g., "OR" Standard Order\) | +| `salesOrganization` | string | Yes | SalesOrganization \(4 chars, e.g., "1010"\) | +| `distributionChannel` | string | Yes | DistributionChannel \(2 chars, e.g., "10"\) | +| `organizationDivision` | string | Yes | OrganizationDivision \(2 chars, e.g., "00"\) | +| `soldToParty` | string | Yes | SoldToParty business partner key \(up to 10 chars\) | +| `items` | json | Yes | Array of sales order items for to_Item deep insert. Each item should include Material and RequestedQuantity \(e.g., \[\{"Material":"TG11","RequestedQuantity":"1"\}\]\). | +| `body` | json | No | Optional additional A_SalesOrder fields merged into the create payload | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Created A_SalesOrder entity \(with deep-inserted items if expanded by SAP\) | + +### `sap_s4hana_update_sales_order` + +Update fields on an A_SalesOrder entity in SAP S/4HANA Cloud (API_SALES_ORDER_SRV). PATCH only sends the fields you provide; existing values are preserved. 
If-Match defaults to a wildcard (unconditional) — for safe concurrent updates pass the ETag from a prior GET to avoid lost updates. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `salesOrder` | string | Yes | SalesOrder key to update \(string, up to 10 characters\) | +| `body` | json | Yes | JSON object with A_SalesOrder fields to update \(e.g., \{"PurchaseOrderByCustomer":"PO-12345","HeaderBillingBlockReason":"01"\}\) | +| `ifMatch` | string | No | If-Match ETag for optimistic concurrency. Defaults to "*" \(unconditional\). | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP \(204 on success\) | +| `data` | json | Null on 204 success, or updated A_SalesOrder entity if SAP returns one | + +### `sap_s4hana_delete_sales_order` + +Delete an A_SalesOrder entity in SAP S/4HANA Cloud (API_SALES_ORDER_SRV). Only orders without subsequent documents (deliveries, invoices) can be deleted; otherwise reject items via update instead. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `salesOrder` | string | Yes | SalesOrder key to delete \(string, up to 10 characters\) | +| `ifMatch` | string | No | If-Match ETag for optimistic concurrency. Defaults to "*" \(unconditional\). | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP \(204 on success\) | +| `data` | json | Null on successful deletion | + +### `sap_s4hana_list_outbound_deliveries` + +List outbound deliveries from SAP S/4HANA Cloud (API_OUTBOUND_DELIVERY_SRV;v=0002, A_OutbDeliveryHeader) with optional OData $filter, $top, $skip, $orderby, $select, $expand. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `filter` | string | No | OData $filter expression \(e.g., "OverallDeliveryStatus eq \'C\'"\) | +| `top` | number | No | Maximum results to return \($top\) | +| `skip` | number | No | Number of results to skip \($skip\) | +| `orderBy` | string | No | OData $orderby expression | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_DeliveryDocumentItem"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Array of A_OutbDeliveryHeader entities | + +### `sap_s4hana_get_outbound_delivery` + +Retrieve a single outbound delivery by DeliveryDocument key from SAP S/4HANA Cloud (API_OUTBOUND_DELIVERY_SRV;v=0002, A_OutbDeliveryHeader). + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `deliveryDocument` | string | Yes | DeliveryDocument key \(string, up to 10 characters\) | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_DeliveryDocumentItem"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | A_OutbDeliveryHeader entity | + +### `sap_s4hana_list_inbound_deliveries` + +List inbound deliveries from SAP S/4HANA Cloud (API_INBOUND_DELIVERY_SRV;v=0002, A_InbDeliveryHeader) with optional OData $filter, $top, $skip, $orderby, $select, $expand. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `filter` | string | No | OData $filter expression \(e.g., "ReceivingPlant eq \'1010\'"\) | +| `top` | number | No | Maximum results to return \($top\) | +| `skip` | number | No | Number of results to skip \($skip\) | +| `orderBy` | string | No | OData $orderby expression | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_DeliveryDocumentItem"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Array of A_InbDeliveryHeader entities | + +### `sap_s4hana_get_inbound_delivery` + +Retrieve a single inbound delivery by DeliveryDocument key from SAP S/4HANA Cloud (API_INBOUND_DELIVERY_SRV;v=0002, A_InbDeliveryHeader). + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `deliveryDocument` | string | Yes | DeliveryDocument key \(string, up to 10 characters\) | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_DeliveryDocumentItem"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | A_InbDeliveryHeader entity | + +### `sap_s4hana_list_billing_documents` + +List billing documents (customer invoices) from SAP S/4HANA Cloud (API_BILLING_DOCUMENT_SRV, A_BillingDocument) with optional OData $filter, $top, $skip, $orderby, $select, $expand. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `filter` | string | No | OData $filter expression \(e.g., "SoldToParty eq \'10100001\'"\) | +| `top` | number | No | Maximum results to return \($top\) | +| `skip` | number | No | Number of results to skip \($skip\) | +| `orderBy` | string | No | OData $orderby expression | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_Item,to_Partner"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Array of A_BillingDocument entities | + +### `sap_s4hana_get_billing_document` + +Retrieve a single billing document (customer invoice) by BillingDocument key from SAP S/4HANA Cloud (API_BILLING_DOCUMENT_SRV, A_BillingDocument). + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `billingDocument` | string | Yes | BillingDocument key \(string, up to 10 characters\) | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_Item,to_Partner"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | A_BillingDocument entity | + +### `sap_s4hana_list_products` + +List products (materials) from SAP S/4HANA Cloud (API_PRODUCT_SRV, A_Product) with optional OData $filter, $top, $skip, $orderby, $select, $expand. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `filter` | string | No | OData $filter expression \(e.g., "ProductType eq \'FERT\'"\) | +| `top` | number | No | Maximum results to return \($top\) | +| `skip` | number | No | Number of results to skip \($skip\) | +| `orderBy` | string | No | OData $orderby expression | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \($expand\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Array of A_Product entities | + +### `sap_s4hana_get_product` + +Retrieve a single product (material) by Product key from SAP S/4HANA Cloud (API_PRODUCT_SRV, A_Product). + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `product` | string | Yes | Product key \(string, up to 40 characters\) | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_Description"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | A_Product entity | + +### `sap_s4hana_update_product` + +Update fields on an A_Product entity in SAP S/4HANA Cloud (API_PRODUCT_SRV). PATCH only sends the fields you provide; existing values are preserved. Flat scalar header fields only — deep/multi-entity updates across navigation properties are not supported by API_PRODUCT_SRV PATCH/PUT (see SAP KBA 2833338); update child entities (plant, valuation, sales data, etc.) via their own endpoints. If-Match defaults to a wildcard (unconditional) — for safe concurrent updates pass the ETag from a prior GET. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `product` | string | Yes | Product key to update \(string, up to 40 characters\) | +| `body` | json | Yes | JSON object with A_Product fields to update \(e.g., \{"ProductGroup":"L001","IsMarkedForDeletion":false\}\) | +| `ifMatch` | string | No | If-Match ETag for optimistic concurrency. Defaults to "*" \(unconditional\). | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP \(204 on success\) | +| `data` | json | Null on 204 success, or updated A_Product entity if SAP returns one | + +### `sap_s4hana_list_material_stock` + +List material stock quantities from SAP S/4HANA Cloud (API_MATERIAL_STOCK_SRV, A_MatlStkInAcctMod). The entity uses an 11-field composite key (Material, Plant, StorageLocation, Batch, Supplier, Customer, WBSElementInternalID, SDDocument, SDDocumentItem, InventorySpecialStockType, InventoryStockType) — query with $filter on these fields instead of a direct key lookup. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `filter` | string | No | OData $filter expression \(e.g., "Material eq \'TG10\' and Plant eq \'1010\' and InventoryStockType eq \'01\'"\) | +| `top` | number | No | Maximum results to return \($top\) | +| `skip` | number | No | Number of results to skip \($skip\) | +| `orderBy` | string | No | OData $orderby expression | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \($expand\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Array of A_MatlStkInAcctMod stock entries | + +### `sap_s4hana_list_material_documents` + +List material document headers (goods movements) from SAP S/4HANA Cloud (API_MATERIAL_DOCUMENT_SRV, A_MaterialDocumentHeader) with optional OData $filter, $top, $skip, $orderby, $select, $expand. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `filter` | string | No | OData $filter expression \(e.g., "MaterialDocumentYear eq \'2024\' and PostingDate ge datetime\'2024-01-01T00:00:00\'"\) | +| `top` | number | No | Maximum results to return \($top\) | +| `skip` | number | No | Number of results to skip \($skip\) | +| `orderBy` | string | No | OData $orderby expression | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_MaterialDocumentItem"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Array of A_MaterialDocumentHeader entities | + +### `sap_s4hana_get_material_document` + +Retrieve a single material document header by composite key (MaterialDocument + MaterialDocumentYear) from SAP S/4HANA Cloud (API_MATERIAL_DOCUMENT_SRV, A_MaterialDocumentHeader). + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. 
eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `materialDocumentYear` | string | Yes | MaterialDocumentYear \(4-character year, e.g., "2024"\) | +| `materialDocument` | string | Yes | MaterialDocument key \(string, up to 10 characters\) | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_MaterialDocumentItem"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | A_MaterialDocumentHeader entity | + +### `sap_s4hana_list_purchase_requisitions` + +List purchase requisitions from SAP S/4HANA Cloud (API_PURCHASEREQ_PROCESS_SRV, A_PurchaseRequisitionHeader) with optional OData $filter, $top, $skip, $orderby, $select, $expand. Note: API_PURCHASEREQ_PROCESS_SRV is deprecated since S/4HANA Cloud Public Edition 2402; the successor is API_PURCHASEREQUISITION_2 (OData v4). This tool still works against tenants where the legacy service is enabled. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `filter` | string | No | OData $filter expression \(e.g., "PurchaseRequisitionType eq \'NB\'"\) | +| `top` | number | No | Maximum results to return \($top\) | +| `skip` | number | No | Number of results to skip \($skip\) | +| `orderBy` | string | No | OData $orderby expression | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_PurchaseReqnItem"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Array of A_PurchaseRequisitionHeader entities | + +### `sap_s4hana_get_purchase_requisition` + +Retrieve a single purchase requisition by PurchaseRequisition key from SAP S/4HANA Cloud (API_PURCHASEREQ_PROCESS_SRV, A_PurchaseRequisitionHeader). 
Note: API_PURCHASEREQ_PROCESS_SRV is deprecated since S/4HANA Cloud Public Edition 2402; the successor is API_PURCHASEREQUISITION_2 (OData v4). This tool still works against tenants where the legacy service is enabled. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `purchaseRequisition` | string | Yes | PurchaseRequisition key \(string, up to 10 characters\) | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_PurchaseReqnItem"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | A_PurchaseRequisitionHeader entity | + +### `sap_s4hana_create_purchase_requisition` + +Create a purchase requisition in SAP S/4HANA Cloud (API_PURCHASEREQ_PROCESS_SRV, A_PurchaseRequisitionHeader). 
PurchaseRequisition is auto-assigned by SAP from the document number range; provide line items via the to_PurchaseReqnItem deep-insert array. Note: API_PURCHASEREQ_PROCESS_SRV is deprecated since S/4HANA Cloud Public Edition 2402; the successor is API_PURCHASEREQUISITION_2 (OData v4). This tool still works against tenants where the legacy service is enabled. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `purchaseRequisitionType` | string | Yes | PurchaseRequisitionType \(e.g., "NB" Standard PR\) | +| `items` | json | Yes | to_PurchaseReqnItem deep-insert array \(e.g., \[\{"PurchaseRequisitionItem":"10","Material":"TG11","RequestedQuantity":"5","Plant":"1010","BaseUnit":"PC","DeliveryDate":"/Date\(1735689600000\)/"\}\]\) | +| `body` | json | No | Additional A_PurchaseRequisitionHeader fields merged into the create payload \(e.g., \{"PurchaseRequisitionDescription":"Office supplies"\}\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | 
+| `data` | json | Created A_PurchaseRequisitionHeader entity | + +### `sap_s4hana_update_purchase_requisition` + +Update fields on an A_PurchaseRequisitionHeader entity in SAP S/4HANA Cloud (API_PURCHASEREQ_PROCESS_SRV; deprecated since S/4HANA 2402, successor is API_PURCHASEREQUISITION_2 OData v4). PATCH only sends the fields you provide; existing values are preserved. If-Match defaults to a wildcard (unconditional) — for safe concurrent updates pass the ETag from a prior GET to avoid lost updates. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `purchaseRequisition` | string | Yes | PurchaseRequisition key to update \(string, up to 10 characters\) | +| `body` | json | Yes | JSON object with A_PurchaseRequisitionHeader fields to update \(e.g., \{"PurchaseRequisitionType":"NB"\}\) | +| `ifMatch` | string | No | If-Match ETag for optimistic concurrency. Defaults to "*" \(unconditional\). 
| + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP \(204 on success\) | +| `data` | json | Null on 204 success, or updated A_PurchaseRequisitionHeader entity if SAP returns one | + +### `sap_s4hana_list_purchase_orders` + +List purchase orders from SAP S/4HANA Cloud (API_PURCHASEORDER_PROCESS_SRV, A_PurchaseOrder) with optional OData $filter, $top, $skip, $orderby, $select, $expand. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `filter` | string | No | OData $filter expression \(e.g., "CompanyCode eq \'1010\'"\) | +| `top` | number | No | Maximum results to return \($top\) | +| `skip` | number | No | Number of results to skip \($skip\) | +| `orderBy` | string | No | OData $orderby expression | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_PurchaseOrderItem"\) | + +#### Output + +| Parameter | Type | Description | +| 
--------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Array of A_PurchaseOrder entities | + +### `sap_s4hana_get_purchase_order` + +Retrieve a single purchase order by PurchaseOrder key from SAP S/4HANA Cloud (API_PURCHASEORDER_PROCESS_SRV, A_PurchaseOrder). + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `purchaseOrder` | string | Yes | PurchaseOrder key \(string, up to 10 characters\) | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \(e.g., "to_PurchaseOrderItem"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | A_PurchaseOrder entity | + +### `sap_s4hana_create_purchase_order` + +Create a purchase order in SAP S/4HANA Cloud (API_PURCHASEORDER_PROCESS_SRV, A_PurchaseOrder). 
PurchaseOrder is auto-assigned by SAP from the document number range; provide line items via the body parameter. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `purchaseOrderType` | string | Yes | PurchaseOrderType \(e.g., "NB" Standard PO\) | +| `companyCode` | string | Yes | CompanyCode \(4 chars, e.g., "1010"\) | +| `purchasingOrganization` | string | Yes | PurchasingOrganization \(4 chars\) | +| `purchasingGroup` | string | Yes | PurchasingGroup \(3 chars\) | +| `supplier` | string | Yes | Supplier business partner key \(up to 10 chars\) | +| `body` | json | Yes | A_PurchaseOrder body containing to_PurchaseOrderItem deep-insert items \(required by SAP\) plus any additional header fields, e.g., \{"to_PurchaseOrderItem":\[\{"PurchaseOrderItem":"10","Material":"TG11","OrderQuantity":"5","Plant":"1010","PurchaseOrderQuantityUnit":"PC","NetPriceAmount":"100.00","DocumentCurrency":"USD"\}\]\}. 
| + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Created A_PurchaseOrder entity | + +### `sap_s4hana_update_purchase_order` + +Update fields on an A_PurchaseOrder entity in SAP S/4HANA Cloud (API_PURCHASEORDER_PROCESS_SRV). PATCH only sends the fields you provide; existing values are preserved. If-Match defaults to a wildcard (unconditional) — for safe concurrent updates pass the ETag from a prior GET to avoid lost updates. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `purchaseOrder` | string | Yes | PurchaseOrder key to update \(string, up to 10 characters\) | +| `body` | json | Yes | JSON object with A_PurchaseOrder fields to update \(e.g., \{"PurchasingGroup":"002","PurchaseOrderDate":"/Date\(1735689600000\)/"\}\) | +| `ifMatch` | string | No | If-Match ETag for optimistic concurrency. Defaults to "*" \(unconditional\). 
| + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP \(204 on success\) | +| `data` | json | Null on 204 success, or updated A_PurchaseOrder entity if SAP returns one | + +### `sap_s4hana_list_supplier_invoices` + +List supplier invoices from SAP S/4HANA Cloud (API_SUPPLIERINVOICE_PROCESS_SRV, A_SupplierInvoice) with optional OData $filter, $top, $skip, $orderby, $select, $expand. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `filter` | string | No | OData $filter expression \(e.g., "InvoicingParty eq \'17300001\'"\) | +| `top` | number | No | Maximum results to return \($top\) | +| `skip` | number | No | Number of results to skip \($skip\) | +| `orderBy` | string | No | OData $orderby expression | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \($expand\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | 
----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Array of A_SupplierInvoice entities | + +### `sap_s4hana_get_supplier_invoice` + +Retrieve a single supplier invoice by composite key (SupplierInvoice + FiscalYear) from SAP S/4HANA Cloud (API_SUPPLIERINVOICE_PROCESS_SRV, A_SupplierInvoice). + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `supplierInvoice` | string | Yes | SupplierInvoice key \(string, up to 10 characters\) | +| `fiscalYear` | string | Yes | FiscalYear \(4-character year, e.g., "2024"\) | +| `select` | string | No | Comma-separated fields to return \($select\) | +| `expand` | string | No | Comma-separated navigation properties to expand \($expand\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | A_SupplierInvoice entity | + +### `sap_s4hana_odata_query` + +Make an arbitrary OData v2 call against any SAP S/4HANA Cloud whitelisted Communication Scenario. 
Use when no dedicated tool exists for the entity. The proxy handles auth, CSRF, and OData unwrapping. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `subdomain` | string | Yes | SAP BTP subaccount subdomain \(technical name of your subaccount, not the S/4HANA host\) | +| `region` | string | Yes | BTP region \(e.g. eu10, us10\) | +| `clientId` | string | Yes | OAuth client ID from the S/4HANA Communication Arrangement | +| `clientSecret` | string | Yes | OAuth client secret from the S/4HANA Communication Arrangement | +| `deploymentType` | string | No | Deployment type: cloud_public \(default\), cloud_private, or on_premise | +| `authType` | string | No | Authentication type: oauth_client_credentials \(default\) or basic | +| `baseUrl` | string | No | Base URL of the S/4HANA host \(Cloud Private / On-Premise\) | +| `tokenUrl` | string | No | OAuth token URL \(Cloud Private / On-Premise + OAuth\) | +| `username` | string | No | Username for HTTP Basic auth | +| `password` | string | No | Password for HTTP Basic auth | +| `service` | string | Yes | OData service name \(e.g., "API_BUSINESS_PARTNER", "API_SALES_ORDER_SRV"\) | +| `path` | string | Yes | Path inside the service \(e.g., "/A_BusinessPartner" or "/A_BusinessPartner\(\'1000123\'\)"\) | +| `method` | string | No | HTTP method: GET \(default\), POST, PATCH, PUT, DELETE, MERGE | +| `query` | json | No | OData query parameters as JSON object or query string \(e.g., \{"$filter":"BusinessPartnerCategory eq \'1\'","$top":10\}\). $format=json is added automatically when omitted. 
| +| `body` | json | No | JSON request body for write operations | +| `ifMatch` | string | No | ETag value for the If-Match header \(required by SAP for PATCH/PUT/DELETE on existing entities\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `status` | number | HTTP status code returned by SAP | +| `data` | json | Parsed OData payload \(entity, collection, or null on 204\) | + + diff --git a/apps/docs/content/docs/en/tools/slack.mdx b/apps/docs/content/docs/en/tools/slack.mdx index 0c0a000e9c4..61884d2cc6e 100644 --- a/apps/docs/content/docs/en/tools/slack.mdx +++ b/apps/docs/content/docs/en/tools/slack.mdx @@ -925,6 +925,139 @@ Create a canvas pinned to a Slack channel as its resource hub | --------- | ---- | ----------- | | `canvas_id` | string | ID of the created channel canvas | +### `slack_get_canvas` + +Get Slack canvas file metadata by canvas ID + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `authMethod` | string | No | Authentication method: oauth or bot_token | +| `botToken` | string | No | Bot token for Custom Bot | +| `canvasId` | string | Yes | Canvas file ID to retrieve \(e.g., F1234ABCD\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `canvas` | object | Canvas file information returned by Slack | +| ↳ `id` | string | Unique canvas file identifier | +| ↳ `created` | number | Unix timestamp when the canvas was created | +| ↳ `timestamp` | number | Unix timestamp associated with the canvas | +| ↳ `name` | string | Canvas file name | +| ↳ `title` | string | Canvas title | +| ↳ `mimetype` | string | MIME type of the canvas file | +| ↳ `filetype` | string | Slack file type for the canvas | +| ↳ `pretty_type` | string | Human-readable file type | +| ↳ `user` | string | User ID of the canvas creator | +| ↳ `editable` | boolean | Whether the canvas file is editable | +| ↳ `size` | number | Canvas file 
size in bytes | +| ↳ `mode` | string | File mode | +| ↳ `is_external` | boolean | Whether the canvas is externally hosted | +| ↳ `is_public` | boolean | Whether the canvas is public | +| ↳ `url_private` | string | Private URL for the canvas file | +| ↳ `url_private_download` | string | Private download URL for the canvas file | +| ↳ `permalink` | string | Permanent URL for the canvas | +| ↳ `channels` | array | Public channel IDs where the canvas appears | +| ↳ `groups` | array | Private channel IDs where the canvas appears | +| ↳ `ims` | array | Direct message IDs where the canvas appears | +| ↳ `canvas_readtime` | number | Approximate read time for canvas content | +| ↳ `is_channel_space` | boolean | Whether this canvas is linked to a channel | +| ↳ `linked_channel_id` | string | Channel ID linked to this canvas | +| ↳ `canvas_creator_id` | string | User ID of the canvas creator | + +### `slack_list_canvases` + +List Slack canvases available to the authenticated user or bot + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `authMethod` | string | No | Authentication method: oauth or bot_token | +| `botToken` | string | No | Bot token for Custom Bot | +| `channel` | string | No | Filter canvases appearing in a specific channel ID | +| `count` | number | No | Number of canvases to return per page | +| `page` | number | No | Page number to return | +| `user` | string | No | Filter canvases created by a single user ID | +| `tsFrom` | string | No | Filter canvases created after this Unix timestamp | +| `tsTo` | string | No | Filter canvases created before this Unix timestamp | +| `teamId` | string | No | Encoded team ID, required when using an org-level token | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `canvases` | array | Canvas file objects returned by Slack | +| ↳ `id` | string | Unique canvas file identifier | +| ↳ `created` | number | Unix timestamp 
when the canvas was created | +| ↳ `timestamp` | number | Unix timestamp associated with the canvas | +| ↳ `name` | string | Canvas file name | +| ↳ `title` | string | Canvas title | +| ↳ `mimetype` | string | MIME type of the canvas file | +| ↳ `filetype` | string | Slack file type for the canvas | +| ↳ `pretty_type` | string | Human-readable file type | +| ↳ `user` | string | User ID of the canvas creator | +| ↳ `editable` | boolean | Whether the canvas file is editable | +| ↳ `size` | number | Canvas file size in bytes | +| ↳ `mode` | string | File mode | +| ↳ `is_external` | boolean | Whether the canvas is externally hosted | +| ↳ `is_public` | boolean | Whether the canvas is public | +| ↳ `url_private` | string | Private URL for the canvas file | +| ↳ `url_private_download` | string | Private download URL for the canvas file | +| ↳ `permalink` | string | Permanent URL for the canvas | +| ↳ `channels` | array | Public channel IDs where the canvas appears | +| ↳ `groups` | array | Private channel IDs where the canvas appears | +| ↳ `ims` | array | Direct message IDs where the canvas appears | +| ↳ `canvas_readtime` | number | Approximate read time for canvas content | +| ↳ `is_channel_space` | boolean | Whether this canvas is linked to a channel | +| ↳ `linked_channel_id` | string | Channel ID linked to this canvas | +| ↳ `canvas_creator_id` | string | User ID of the canvas creator | +| `paging` | object | Pagination information from Slack | +| ↳ `count` | number | Number of items requested per page | +| ↳ `total` | number | Total number of matching files | +| ↳ `page` | number | Current page number | +| ↳ `pages` | number | Total number of pages | + +### `slack_lookup_canvas_sections` + +Find Slack canvas section IDs matching criteria for later edits + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `authMethod` | string | No | Authentication method: oauth or bot_token | +| `botToken` | string | No 
| Bot token for Custom Bot | +| `canvasId` | string | Yes | Canvas ID to search \(e.g., F1234ABCD\) | +| `criteria` | json | Yes | Section lookup criteria, such as \{"section_types":\["h1"\],"contains_text":"Roadmap"\} | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `sections` | array | Canvas sections matching the lookup criteria | +| ↳ `id` | string | Canvas section identifier | + +### `slack_delete_canvas` + +Delete a Slack canvas by its canvas ID + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `authMethod` | string | No | Authentication method: oauth or bot_token | +| `botToken` | string | No | Bot token for Custom Bot | +| `canvasId` | string | Yes | Canvas ID to delete \(e.g., F1234ABCD\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `ok` | boolean | Whether Slack deleted the canvas successfully | + ### `slack_create_conversation` Create a new public or private channel in a Slack workspace. 
diff --git a/apps/docs/content/docs/en/tools/stagehand.mdx b/apps/docs/content/docs/en/tools/stagehand.mdx index d03ba626a77..c83d0cf5431 100644 --- a/apps/docs/content/docs/en/tools/stagehand.mdx +++ b/apps/docs/content/docs/en/tools/stagehand.mdx @@ -72,6 +72,8 @@ Run an autonomous web agent to complete tasks and extract structured data | `provider` | string | No | AI provider to use: openai or anthropic | | `apiKey` | string | Yes | API key for the selected provider | | `outputSchema` | json | No | Optional JSON schema defining the structure of data the agent should return | +| `mode` | string | No | Agent tool mode: dom \(default\), hybrid, or cua | +| `maxSteps` | number | No | Maximum agent steps \(default 20, max 200\) | #### Output @@ -92,5 +94,7 @@ Run an autonomous web agent to complete tasks and extract structured data | ↳ `timestamp` | number | Unix timestamp when the action was performed | | ↳ `timeMs` | number | Time in milliseconds \(for wait actions\) | | `structuredOutput` | object | Extracted data matching the provided output schema | +| `liveViewUrl` | string | Embeddable Browserbase live view URL \(active only while the session is running\) | +| `sessionId` | string | Browserbase session identifier | diff --git a/apps/docs/content/docs/en/triggers/index.mdx b/apps/docs/content/docs/en/triggers/index.mdx index 99369685055..e996a39bc27 100644 --- a/apps/docs/content/docs/en/triggers/index.mdx +++ b/apps/docs/content/docs/en/triggers/index.mdx @@ -89,6 +89,8 @@ Polling Groups let you monitor multiple team members' Gmail or Outlook inboxes w Invitees receive an email with a link to connect their account. Once connected, their inbox is automatically included in the polling group. Invitees don't need to be members of your Sim organization. 
+This is separate from external workspace membership: polling group invitees are granting access to an inbox for a trigger, while external workspace members are collaborators with Read, Write, or Admin access to a workspace. + **Using in a Workflow** When configuring an email trigger, select your polling group from the credentials dropdown instead of an individual account. The system creates webhooks for each member and routes all emails through your workflow. diff --git a/apps/docs/content/docs/en/variables/index.mdx b/apps/docs/content/docs/en/variables/index.mdx index 0596e08a75c..d3543f368f7 100644 --- a/apps/docs/content/docs/en/variables/index.mdx +++ b/apps/docs/content/docs/en/variables/index.mdx @@ -49,7 +49,7 @@ Environment variables store sensitive values like API keys, tokens, and configur | Scope | Visibility | Use case | |-------|-----------|----------| -| **Workspace** | All workspace members | Shared API keys, team configuration | +| **Workspace** | All workspace members, including external workspace members | Shared API keys, team configuration | | **Personal** | Only you | Your personal tokens, dev credentials | When both a workspace and personal variable share the same key, the workspace value takes precedence. @@ -84,7 +84,7 @@ If a workflow variable and a block output share the same name, Sim resolves the syntax. Environment variables store sensitive configuration like API keys using {{KEY}} syntax. They never appear in logs and are managed at the workspace or personal level." }, { question: "Can I use environment variables in the Function block?", answer: "Yes. Use the double curly brace syntax {{KEY}} directly in your code. The value is substituted before execution, so the actual secret never appears in logs or outputs." }, - { question: "How do I share an API key with my team?", answer: "Create a workspace-scoped environment variable in Settings → Secrets. All workspace members will be able to use it in their workflows via {{KEY}} syntax." 
}, + { question: "How do I share an API key with my team?", answer: "Create a workspace-scoped environment variable in Settings → Secrets. All workspace members, including external workspace members, will be able to use it in their workflows via {{KEY}} syntax." }, { question: "What happens if a variable name has spaces or mixed case?", answer: "Variable resolution is case-insensitive and ignores spaces. A variable named 'My Counter' can be referenced as or . However, using consistent naming (like camelCase) is recommended." }, { question: "Can I reference environment variables in the Agent system prompt?", answer: "Yes. You can use {{KEY}} syntax in any text field, including system prompts, to inject environment variable values." }, ]} /> diff --git a/apps/sim/app/(auth)/signup/signup-form.tsx b/apps/sim/app/(auth)/signup/signup-form.tsx index c721a07291b..2959cf699cf 100644 --- a/apps/sim/app/(auth)/signup/signup-form.tsx +++ b/apps/sim/app/(auth)/signup/signup-form.tsx @@ -98,11 +98,7 @@ function SignupFormContent({ githubAvailable, googleAvailable, isProduction }: S const [showEmailValidationError, setShowEmailValidationError] = useState(false) const [formError, setFormError] = useState(null) const turnstileRef = useRef(null) - const [turnstileSiteKey, setTurnstileSiteKey] = useState() - - useEffect(() => { - setTurnstileSiteKey(getEnv('NEXT_PUBLIC_TURNSTILE_SITE_KEY')) - }, []) + const [turnstileSiteKey] = useState(() => getEnv('NEXT_PUBLIC_TURNSTILE_SITE_KEY')) const rawRedirectUrl = searchParams.get('redirect') || searchParams.get('callbackUrl') || '' const isValidRedirectUrl = rawRedirectUrl ? 
validateCallbackUrl(rawRedirectUrl) : false const invalidCallbackRef = useRef(false) diff --git a/apps/sim/app/(landing)/components/auth-modal/auth-modal.tsx b/apps/sim/app/(landing)/components/auth-modal/auth-modal.tsx index d7a213f2499..7b3fb99bc9e 100644 --- a/apps/sim/app/(landing)/components/auth-modal/auth-modal.tsx +++ b/apps/sim/app/(landing)/components/auth-modal/auth-modal.tsx @@ -1,6 +1,6 @@ 'use client' -import { useCallback, useEffect, useMemo, useState } from 'react' +import { useEffect, useMemo, useState } from 'react' import { createLogger } from '@sim/logger' import { Loader2, X } from 'lucide-react' import Image from 'next/image' @@ -88,24 +88,21 @@ export function AuthModal({ children, defaultView = 'login', source }: AuthModal } }, [open, providerStatus, hasModalContent, defaultView, router, view]) - const handleOpenChange = useCallback( - (nextOpen: boolean) => { - if (nextOpen && providerStatus && !hasModalContent) { - router.push(defaultView === 'login' ? '/login' : '/signup') - return - } - setOpen(nextOpen) - if (nextOpen) { - const initialView = - defaultView === 'signup' && providerStatus?.registrationDisabled ? 'login' : defaultView - setView(initialView) - captureClientEvent('auth_modal_opened', { view: initialView, source }) - } - }, - [defaultView, hasModalContent, providerStatus, router, source] - ) + function handleOpenChange(nextOpen: boolean) { + if (nextOpen && providerStatus && !hasModalContent) { + router.push(defaultView === 'login' ? '/login' : '/signup') + return + } + setOpen(nextOpen) + if (nextOpen) { + const initialView = + defaultView === 'signup' && providerStatus?.registrationDisabled ? 
'login' : defaultView + setView(initialView) + captureClientEvent('auth_modal_opened', { view: initialView, source }) + } + } - const handleSocialLogin = useCallback(async (provider: 'github' | 'google') => { + async function handleSocialLogin(provider: 'github' | 'google') { setSocialLoading(provider) try { await client.signIn.social({ provider, callbackURL: '/workspace' }) @@ -114,17 +111,17 @@ export function AuthModal({ children, defaultView = 'login', source }: AuthModal } finally { setSocialLoading(null) } - }, []) + } - const handleSSOLogin = useCallback(() => { + function handleSSOLogin() { setOpen(false) router.push('/sso') - }, [router]) + } - const handleEmailContinue = useCallback(() => { + function handleEmailContinue() { setOpen(false) router.push(view === 'login' ? '/login' : '/signup') - }, [router, view]) + } return ( diff --git a/apps/sim/app/(landing)/components/contact/contact-form.tsx b/apps/sim/app/(landing)/components/contact/contact-form.tsx index 11030ac760a..879f28133bc 100644 --- a/apps/sim/app/(landing)/components/contact/contact-form.tsx +++ b/apps/sim/app/(landing)/components/contact/contact-form.tsx @@ -1,6 +1,6 @@ 'use client' -import { useEffect, useRef, useState } from 'react' +import { useRef, useState } from 'react' import { Turnstile, type TurnstileInstance } from '@marsidev/react-turnstile' import { toError } from '@sim/utils/errors' import { useMutation } from '@tanstack/react-query' @@ -99,11 +99,7 @@ export function ContactForm() { const [isSubmitting, setIsSubmitting] = useState(false) const [website, setWebsite] = useState('') const [widgetReady, setWidgetReady] = useState(false) - const [turnstileSiteKey, setTurnstileSiteKey] = useState() - - useEffect(() => { - setTurnstileSiteKey(getEnv('NEXT_PUBLIC_TURNSTILE_SITE_KEY')) - }, []) + const [turnstileSiteKey] = useState(() => getEnv('NEXT_PUBLIC_TURNSTILE_SITE_KEY')) function updateField( field: TField, diff --git a/apps/sim/app/(landing)/integrations/data/icon-mapping.ts 
b/apps/sim/app/(landing)/integrations/data/icon-mapping.ts index 8417728536c..9d3280bd825 100644 --- a/apps/sim/app/(landing)/integrations/data/icon-mapping.ts +++ b/apps/sim/app/(landing)/integrations/data/icon-mapping.ts @@ -154,6 +154,7 @@ import { RootlyIcon, S3Icon, SalesforceIcon, + SapS4HanaIcon, SESIcon, SearchIcon, SecretsManagerIcon, @@ -351,6 +352,7 @@ export const blockTypeToIconMap: Record = { rootly: RootlyIcon, s3: S3Icon, salesforce: SalesforceIcon, + sap_s4hana: SapS4HanaIcon, search: SearchIcon, secrets_manager: SecretsManagerIcon, sendgrid: SendgridIcon, diff --git a/apps/sim/app/(landing)/integrations/data/integrations.json b/apps/sim/app/(landing)/integrations/data/integrations.json index 360f103bbcd..b11c74c5f47 100644 --- a/apps/sim/app/(landing)/integrations/data/integrations.json +++ b/apps/sim/app/(landing)/integrations/data/integrations.json @@ -11379,6 +11379,177 @@ "integrationTypes": ["crm", "customer-support", "sales"], "tags": ["sales-engagement", "customer-support"] }, + { + "type": "sap_s4hana", + "slug": "sap-s4hana", + "name": "SAP S/4HANA", + "description": "Read and write SAP S/4HANA Cloud business data via OData", + "longDescription": "Connect SAP S/4HANA Cloud Public Edition with per-tenant OAuth 2.0 client credentials configured in your Communication Arrangements. Read and create business partners, customers, suppliers, sales orders, deliveries (inbound/outbound), billing documents, products, stock and material documents, purchase requisitions, purchase orders, and supplier invoices, or run arbitrary OData v2 queries against any whitelisted Communication Scenario.", + "bgColor": "#FFFFFF", + "iconName": "SapS4HanaIcon", + "docsUrl": "https://docs.sim.ai/tools/sap_s4hana", + "operations": [ + { + "name": "List Business Partners", + "description": "List business partners from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_BusinessPartner) with optional OData $filter, $top, $skip, $orderby, $select, $expand." 
+ }, + { + "name": "Get Business Partner", + "description": "Retrieve a single business partner by BusinessPartner key from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_BusinessPartner)." + }, + { + "name": "Create Business Partner", + "description": "Create a business partner in SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_BusinessPartner). For Person category 1 provide FirstName and LastName. For Organization category 2 provide OrganizationBPName1." + }, + { + "name": "Update Business Partner", + "description": "Update fields on an A_BusinessPartner entity in SAP S/4HANA Cloud (API_BUSINESS_PARTNER). PATCH only sends the fields you provide; existing values are preserved. If-Match defaults to a wildcard (unconditional) — for safe concurrent updates pass the ETag from a prior GET to avoid lost updates." + }, + { + "name": "List Customers", + "description": "List customers from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_Customer) with optional OData $filter, $top, $skip, $orderby, $select, $expand." + }, + { + "name": "Get Customer", + "description": "Retrieve a single customer by Customer key from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_Customer)." + }, + { + "name": "Update Customer", + "description": "Update fields on an A_Customer entity in SAP S/4HANA Cloud (API_BUSINESS_PARTNER). PATCH only sends the fields you provide; existing values are preserved. A_Customer PATCH is limited to modifiable fields such as OrderIsBlockedForCustomer, DeliveryIsBlock, BillingIsBlockedForCustomer, PostingIsBlocked, and DeletionIndicator. If-Match defaults to a wildcard - for safe concurrent updates pass the ETag from a prior GET to avoid lost updates." + }, + { + "name": "List Suppliers", + "description": "List suppliers from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_Supplier) with optional OData $filter, $top, $skip, $orderby, $select, $expand." 
+ }, + { + "name": "Get Supplier", + "description": "Retrieve a single supplier by Supplier key from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_Supplier)." + }, + { + "name": "Update Supplier", + "description": "Update fields on an A_Supplier entity in SAP S/4HANA Cloud (API_BUSINESS_PARTNER). PATCH only sends the fields you provide; existing values are preserved. A_Supplier PATCH is limited to modifiable fields such as PostingIsBlocked, PurchasingIsBlocked, PaymentIsBlockedForSupplier, DeletionIndicator, and SupplierAccountGroup. If-Match defaults to a wildcard - for safe concurrent updates pass the ETag from a prior GET to avoid lost updates." + }, + { + "name": "List Sales Orders", + "description": "List sales orders from SAP S/4HANA Cloud (API_SALES_ORDER_SRV, A_SalesOrder) with optional OData $filter, $top, $skip, $orderby, $select, $expand." + }, + { + "name": "Get Sales Order", + "description": "Retrieve a single sales order by SalesOrder key from SAP S/4HANA Cloud (API_SALES_ORDER_SRV, A_SalesOrder)." + }, + { + "name": "Create Sales Order", + "description": "Create a sales order in SAP S/4HANA Cloud (API_SALES_ORDER_SRV, A_SalesOrder) with deep insert of sales order items via to_Item." + }, + { + "name": "Update Sales Order", + "description": "Update fields on an A_SalesOrder entity in SAP S/4HANA Cloud (API_SALES_ORDER_SRV). PATCH only sends the fields you provide; existing values are preserved. If-Match defaults to a wildcard (unconditional) — for safe concurrent updates pass the ETag from a prior GET to avoid lost updates." + }, + { + "name": "Delete Sales Order", + "description": "Delete an A_SalesOrder entity in SAP S/4HANA Cloud (API_SALES_ORDER_SRV). Only orders without subsequent documents (deliveries, invoices) can be deleted; otherwise reject items via update instead." 
+ }, + { + "name": "List Outbound Deliveries", + "description": "List outbound deliveries from SAP S/4HANA Cloud (API_OUTBOUND_DELIVERY_SRV;v=0002, A_OutbDeliveryHeader) with optional OData $filter, $top, $skip, $orderby, $select, $expand." + }, + { + "name": "Get Outbound Delivery", + "description": "Retrieve a single outbound delivery by DeliveryDocument key from SAP S/4HANA Cloud (API_OUTBOUND_DELIVERY_SRV;v=0002, A_OutbDeliveryHeader)." + }, + { + "name": "List Inbound Deliveries", + "description": "List inbound deliveries from SAP S/4HANA Cloud (API_INBOUND_DELIVERY_SRV;v=0002, A_InbDeliveryHeader) with optional OData $filter, $top, $skip, $orderby, $select, $expand." + }, + { + "name": "Get Inbound Delivery", + "description": "Retrieve a single inbound delivery by DeliveryDocument key from SAP S/4HANA Cloud (API_INBOUND_DELIVERY_SRV;v=0002, A_InbDeliveryHeader)." + }, + { + "name": "List Billing Documents", + "description": "List billing documents (customer invoices) from SAP S/4HANA Cloud (API_BILLING_DOCUMENT_SRV, A_BillingDocument) with optional OData $filter, $top, $skip, $orderby, $select, $expand." + }, + { + "name": "Get Billing Document", + "description": "Retrieve a single billing document (customer invoice) by BillingDocument key from SAP S/4HANA Cloud (API_BILLING_DOCUMENT_SRV, A_BillingDocument)." + }, + { + "name": "List Products", + "description": "List products (materials) from SAP S/4HANA Cloud (API_PRODUCT_SRV, A_Product) with optional OData $filter, $top, $skip, $orderby, $select, $expand." + }, + { + "name": "Get Product", + "description": "Retrieve a single product (material) by Product key from SAP S/4HANA Cloud (API_PRODUCT_SRV, A_Product)." + }, + { + "name": "Update Product", + "description": "Update fields on an A_Product entity in SAP S/4HANA Cloud (API_PRODUCT_SRV). PATCH only sends the fields you provide; existing values are preserved. 
Flat scalar header fields only — deep/multi-entity updates across navigation properties are not supported by API_PRODUCT_SRV PATCH/PUT (see SAP KBA 2833338); update child entities (plant, valuation, sales data, etc.) via their own endpoints. If-Match defaults to a wildcard (unconditional) — for safe concurrent updates pass the ETag from a prior GET." + }, + { + "name": "List Material Stock", + "description": "List material stock quantities from SAP S/4HANA Cloud (API_MATERIAL_STOCK_SRV, A_MatlStkInAcctMod). The entity uses an 11-field composite key (Material, Plant, StorageLocation, Batch, Supplier, Customer, WBSElementInternalID, SDDocument, SDDocumentItem, InventorySpecialStockType, InventoryStockType) — query with $filter on these fields instead of a direct key lookup." + }, + { + "name": "List Material Documents", + "description": "List material document headers (goods movements) from SAP S/4HANA Cloud (API_MATERIAL_DOCUMENT_SRV, A_MaterialDocumentHeader) with optional OData $filter, $top, $skip, $orderby, $select, $expand." + }, + { + "name": "Get Material Document", + "description": "Retrieve a single material document header by composite key (MaterialDocument + MaterialDocumentYear) from SAP S/4HANA Cloud (API_MATERIAL_DOCUMENT_SRV, A_MaterialDocumentHeader)." + }, + { + "name": "List Purchase Requisitions", + "description": "List purchase requisitions from SAP S/4HANA Cloud (API_PURCHASEREQ_PROCESS_SRV, A_PurchaseRequisitionHeader) with optional OData $filter, $top, $skip, $orderby, $select, $expand. Note: API_PURCHASEREQ_PROCESS_SRV is deprecated since S/4HANA Cloud Public Edition 2402; the successor is API_PURCHASEREQUISITION_2 (OData v4). This tool still works against tenants where the legacy service is enabled." + }, + { + "name": "Get Purchase Requisition", + "description": "Retrieve a single purchase requisition by PurchaseRequisition key from SAP S/4HANA Cloud (API_PURCHASEREQ_PROCESS_SRV, A_PurchaseRequisitionHeader). 
Note: API_PURCHASEREQ_PROCESS_SRV is deprecated since S/4HANA Cloud Public Edition 2402; the successor is API_PURCHASEREQUISITION_2 (OData v4). This tool still works against tenants where the legacy service is enabled." + }, + { + "name": "Create Purchase Requisition", + "description": "Create a purchase requisition in SAP S/4HANA Cloud (API_PURCHASEREQ_PROCESS_SRV, A_PurchaseRequisitionHeader). PurchaseRequisition is auto-assigned by SAP from the document number range; provide line items via the to_PurchaseReqnItem deep-insert array. Note: API_PURCHASEREQ_PROCESS_SRV is deprecated since S/4HANA Cloud Public Edition 2402; the successor is API_PURCHASEREQUISITION_2 (OData v4). This tool still works against tenants where the legacy service is enabled." + }, + { + "name": "Update Purchase Requisition", + "description": "Update fields on an A_PurchaseRequisitionHeader entity in SAP S/4HANA Cloud (API_PURCHASEREQ_PROCESS_SRV; deprecated since S/4HANA 2402, successor is API_PURCHASEREQUISITION_2 OData v4). PATCH only sends the fields you provide; existing values are preserved. If-Match defaults to a wildcard - for safe concurrent updates pass the ETag from a prior GET to avoid lost updates." + }, + { + "name": "List Purchase Orders", + "description": "List purchase orders from SAP S/4HANA Cloud (API_PURCHASEORDER_PROCESS_SRV, A_PurchaseOrder) with optional OData $filter, $top, $skip, $orderby, $select, $expand." + }, + { + "name": "Get Purchase Order", + "description": "Retrieve a single purchase order by PurchaseOrder key from SAP S/4HANA Cloud (API_PURCHASEORDER_PROCESS_SRV, A_PurchaseOrder)." + }, + { + "name": "Create Purchase Order", + "description": "Create a purchase order in SAP S/4HANA Cloud (API_PURCHASEORDER_PROCESS_SRV, A_PurchaseOrder). PurchaseOrder is auto-assigned by SAP from the document number range; provide line items via the body parameter." 
+ }, + { + "name": "Update Purchase Order", + "description": "Update fields on an A_PurchaseOrder entity in SAP S/4HANA Cloud (API_PURCHASEORDER_PROCESS_SRV). PATCH only sends the fields you provide; existing values are preserved. If-Match defaults to a wildcard (unconditional) — for safe concurrent updates pass the ETag from a prior GET to avoid lost updates." + }, + { + "name": "List Supplier Invoices", + "description": "List supplier invoices from SAP S/4HANA Cloud (API_SUPPLIERINVOICE_PROCESS_SRV, A_SupplierInvoice) with optional OData $filter, $top, $skip, $orderby, $select, $expand." + }, + { + "name": "Get Supplier Invoice", + "description": "Retrieve a single supplier invoice by composite key (SupplierInvoice + FiscalYear) from SAP S/4HANA Cloud (API_SUPPLIERINVOICE_PROCESS_SRV, A_SupplierInvoice)." + }, + { + "name": "OData Query (advanced)", + "description": "Make an arbitrary OData v2 call against any SAP S/4HANA Cloud whitelisted Communication Scenario. Use when no dedicated tool exists for the entity. The proxy handles auth, CSRF, and OData unwrapping." 
+ } + ], + "operationCount": 38, + "triggers": [], + "triggerCount": 0, + "authType": "none", + "category": "tools", + "integrationTypes": ["other", "developer-tools"], + "tags": ["automation"] + }, { "type": "search", "slug": "search", @@ -11983,6 +12154,22 @@ "name": "Create Channel Canvas", "description": "Create a canvas pinned to a Slack channel as its resource hub" }, + { + "name": "Get Canvas Info", + "description": "Get Slack canvas file metadata by canvas ID" + }, + { + "name": "List Canvases", + "description": "List Slack canvases available to the authenticated user or bot" + }, + { + "name": "Lookup Canvas Sections", + "description": "Find Slack canvas section IDs matching criteria for later edits" + }, + { + "name": "Delete Canvas", + "description": "Delete a Slack canvas by its canvas ID" + }, { "name": "Create Conversation", "description": "Create a new public or private channel in a Slack workspace." @@ -12008,7 +12195,7 @@ "description": "Publish a static view to a user" } ], - "operationCount": 25, + "operationCount": 29, "triggers": [ { "id": "slack_webhook", diff --git a/apps/sim/app/api/admin/mothership/route.ts b/apps/sim/app/api/admin/mothership/route.ts index 19e4a029ec4..d34efca9e50 100644 --- a/apps/sim/app/api/admin/mothership/route.ts +++ b/apps/sim/app/api/admin/mothership/route.ts @@ -16,6 +16,14 @@ function getMothershipUrl(environment: string): string | null { return ENV_URLS[environment] ?? 
null } +const ENDPOINT_PATTERN = /^[a-zA-Z0-9_-]+(?:\/[a-zA-Z0-9_-]+)*$/ + +function isValidEndpoint(endpoint: string): boolean { + if (!endpoint) return false + if (endpoint.includes('..')) return false + return ENDPOINT_PATTERN.test(endpoint) +} + async function isAdminRequestAuthorized() { const session = await getSession() if (!session?.user?.id) return false @@ -57,6 +65,10 @@ export const POST = withRouteHandler(async (req: NextRequest) => { return NextResponse.json({ error: 'endpoint query param required' }, { status: 400 }) } + if (!isValidEndpoint(endpoint)) { + return NextResponse.json({ error: 'invalid endpoint' }, { status: 400 }) + } + const baseUrl = getMothershipUrl(environment) if (!baseUrl) { return NextResponse.json( @@ -108,6 +120,10 @@ export const GET = withRouteHandler(async (req: NextRequest) => { return NextResponse.json({ error: 'endpoint query param required' }, { status: 400 }) } + if (!isValidEndpoint(endpoint)) { + return NextResponse.json({ error: 'invalid endpoint' }, { status: 400 }) + } + const baseUrl = getMothershipUrl(environment) if (!baseUrl) { return NextResponse.json( diff --git a/apps/sim/app/api/chat/[identifier]/otp/route.test.ts b/apps/sim/app/api/chat/[identifier]/otp/route.test.ts index 8069757ea79..fa8da3f97c5 100644 --- a/apps/sim/app/api/chat/[identifier]/otp/route.test.ts +++ b/apps/sim/app/api/chat/[identifier]/otp/route.test.ts @@ -112,6 +112,16 @@ vi.mock('@/lib/core/storage', () => ({ getStorageMethod: mockGetStorageMethod, })) +const { mockCheckRateLimitDirect } = vi.hoisted(() => ({ + mockCheckRateLimitDirect: vi.fn(), +})) + +vi.mock('@/lib/core/rate-limiter', () => ({ + RateLimiter: class { + checkRateLimitDirect = mockCheckRateLimitDirect + }, +})) + vi.mock('@/lib/messaging/email/mailer', () => ({ sendEmail: mockSendEmail, })) @@ -234,6 +244,13 @@ describe('Chat OTP API Route', () => { })) requestUtilsMockFns.mockGenerateRequestId.mockReturnValue('req-123') + 
requestUtilsMockFns.mockGetClientIp.mockReturnValue('1.2.3.4') + + mockCheckRateLimitDirect.mockResolvedValue({ + allowed: true, + remaining: 10, + resetAt: new Date(Date.now() + 60_000), + }) mockZodParse.mockImplementation((data: unknown) => data) @@ -283,6 +300,134 @@ describe('Chat OTP API Route', () => { }) }) + describe('POST - Rate limiting', () => { + const buildDeploymentSelect = () => + mockDbSelect.mockImplementationOnce(() => ({ + from: vi.fn().mockReturnValue({ + where: vi.fn().mockReturnValue({ + limit: vi.fn().mockResolvedValue([ + { + id: mockChatId, + authType: 'email', + allowedEmails: [mockEmail], + title: 'Test Chat', + }, + ]), + }), + }), + })) + + it('returns 429 with Retry-After when IP rate limit is exceeded', async () => { + mockCheckRateLimitDirect.mockResolvedValueOnce({ + allowed: false, + remaining: 0, + resetAt: new Date(Date.now() + 900_000), + retryAfterMs: 900_000, + }) + + const headerSet = vi.fn() + mockCreateErrorResponse.mockImplementationOnce((message: string, status: number) => ({ + json: () => Promise.resolve({ error: message }), + status, + headers: { set: headerSet }, + })) + + const request = new NextRequest('http://localhost:3000/api/chat/test/otp', { + method: 'POST', + body: JSON.stringify({ email: mockEmail }), + }) + + const response = await POST(request, { + params: Promise.resolve({ identifier: mockIdentifier }), + }) + + expect(response.status).toBe(429) + expect(headerSet).toHaveBeenCalledWith('Retry-After', '900') + expect(mockSendEmail).not.toHaveBeenCalled() + expect(mockDbSelect).not.toHaveBeenCalled() + }) + + it('returns 429 with Retry-After when email rate limit is exceeded', async () => { + mockCheckRateLimitDirect + .mockResolvedValueOnce({ + allowed: true, + remaining: 9, + resetAt: new Date(Date.now() + 60_000), + }) + .mockResolvedValueOnce({ + allowed: false, + remaining: 0, + resetAt: new Date(Date.now() + 900_000), + retryAfterMs: 900_000, + }) + + const headerSet = vi.fn() + 
mockCreateErrorResponse.mockImplementationOnce((message: string, status: number) => ({ + json: () => Promise.resolve({ error: message }), + status, + headers: { set: headerSet }, + })) + + buildDeploymentSelect() + + const request = new NextRequest('http://localhost:3000/api/chat/test/otp', { + method: 'POST', + body: JSON.stringify({ email: mockEmail }), + }) + + const response = await POST(request, { + params: Promise.resolve({ identifier: mockIdentifier }), + }) + + expect(response.status).toBe(429) + expect(headerSet).toHaveBeenCalledWith('Retry-After', '900') + expect(mockSendEmail).not.toHaveBeenCalled() + }) + + it('falls back to refill interval when retryAfterMs is missing', async () => { + mockCheckRateLimitDirect.mockResolvedValueOnce({ + allowed: false, + remaining: 0, + resetAt: new Date(Date.now() + 900_000), + }) + + const headerSet = vi.fn() + mockCreateErrorResponse.mockImplementationOnce((message: string, status: number) => ({ + json: () => Promise.resolve({ error: message }), + status, + headers: { set: headerSet }, + })) + + const request = new NextRequest('http://localhost:3000/api/chat/test/otp', { + method: 'POST', + body: JSON.stringify({ email: mockEmail }), + }) + + await POST(request, { params: Promise.resolve({ identifier: mockIdentifier }) }) + + expect(headerSet).toHaveBeenCalledWith('Retry-After', '900') + }) + + it('skips IP rate limit when client IP is unknown', async () => { + requestUtilsMockFns.mockGetClientIp.mockReturnValueOnce('unknown') + buildDeploymentSelect() + + const request = new NextRequest('http://localhost:3000/api/chat/test/otp', { + method: 'POST', + body: JSON.stringify({ email: mockEmail }), + }) + + await POST(request, { params: Promise.resolve({ identifier: mockIdentifier }) }) + + // Only the email-scoped check should run, not the IP-scoped one + expect(mockCheckRateLimitDirect).toHaveBeenCalledTimes(1) + expect(mockCheckRateLimitDirect).toHaveBeenCalledWith( + expect.stringContaining('chat-otp:email:'), + 
expect.any(Object) + ) + }) + }) + describe('POST - Store OTP (Database path)', () => { beforeEach(() => { mockGetStorageMethod.mockReturnValue('database') diff --git a/apps/sim/app/api/chat/[identifier]/otp/route.ts b/apps/sim/app/api/chat/[identifier]/otp/route.ts index 433159ff600..9010f9af464 100644 --- a/apps/sim/app/api/chat/[identifier]/otp/route.ts +++ b/apps/sim/app/api/chat/[identifier]/otp/route.ts @@ -8,9 +8,11 @@ import type { NextRequest } from 'next/server' import { z } from 'zod' import { renderOTPEmail } from '@/components/emails' import { getRedisClient } from '@/lib/core/config/redis' +import type { TokenBucketConfig } from '@/lib/core/rate-limiter' +import { RateLimiter } from '@/lib/core/rate-limiter' import { addCorsHeaders, isEmailAllowed } from '@/lib/core/security/deployment' import { getStorageMethod } from '@/lib/core/storage' -import { generateRequestId } from '@/lib/core/utils/request' +import { generateRequestId, getClientIp } from '@/lib/core/utils/request' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { sendEmail } from '@/lib/messaging/email/mailer' import { setChatAuthCookie } from '@/app/api/chat/utils' @@ -18,6 +20,20 @@ import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/ const logger = createLogger('ChatOtpAPI') +const rateLimiter = new RateLimiter() + +const OTP_IP_RATE_LIMIT: TokenBucketConfig = { + maxTokens: 10, + refillRate: 10, + refillIntervalMs: 15 * 60_000, +} + +const OTP_EMAIL_RATE_LIMIT: TokenBucketConfig = { + maxTokens: 3, + refillRate: 3, + refillIntervalMs: 15 * 60_000, +} + function generateOTP(): string { return randomInt(100000, 1000000).toString() } @@ -214,6 +230,23 @@ export const POST = withRouteHandler( const requestId = generateRequestId() try { + const ip = getClientIp(request) + if (ip !== 'unknown') { + const ipRateLimit = await rateLimiter.checkRateLimitDirect( + `chat-otp:ip:${identifier}:${ip}`, + OTP_IP_RATE_LIMIT + ) + if 
(!ipRateLimit.allowed) { + logger.warn(`[${requestId}] OTP IP rate limit exceeded for ${identifier} from ${ip}`) + const retryAfter = Math.ceil( + (ipRateLimit.retryAfterMs ?? OTP_IP_RATE_LIMIT.refillIntervalMs) / 1000 + ) + const response = createErrorResponse('Too many requests. Please try again later.', 429) + response.headers.set('Retry-After', String(retryAfter)) + return addCorsHeaders(response, request) + } + } + const body = await request.json() const { email } = otpRequestSchema.parse(body) @@ -255,6 +288,25 @@ export const POST = withRouteHandler( ) } + const emailRateLimit = await rateLimiter.checkRateLimitDirect( + `chat-otp:email:${deployment.id}:${email.toLowerCase()}`, + OTP_EMAIL_RATE_LIMIT + ) + if (!emailRateLimit.allowed) { + logger.warn( + `[${requestId}] OTP email rate limit exceeded for ${email} on chat ${deployment.id}` + ) + const retryAfter = Math.ceil( + (emailRateLimit.retryAfterMs ?? OTP_EMAIL_RATE_LIMIT.refillIntervalMs) / 1000 + ) + const response = createErrorResponse( + 'Too many verification code requests. 
Please try again later.', + 429 + ) + response.headers.set('Retry-After', String(retryAfter)) + return addCorsHeaders(response, request) + } + const otp = generateOTP() await storeOTP(email, deployment.id, otp) diff --git a/apps/sim/app/api/copilot/training/examples/route.ts b/apps/sim/app/api/copilot/training/examples/route.ts index 1e6a5aa6574..7f68cf812c4 100644 --- a/apps/sim/app/api/copilot/training/examples/route.ts +++ b/apps/sim/app/api/copilot/training/examples/route.ts @@ -1,10 +1,7 @@ import { createLogger } from '@sim/logger' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' -import { - authenticateCopilotRequestSessionOnly, - createUnauthorizedResponse, -} from '@/lib/copilot/request/http' +import { checkInternalApiKey, createUnauthorizedResponse } from '@/lib/copilot/request/http' import { env } from '@/lib/core/config/env' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' @@ -21,8 +18,8 @@ const TrainingExampleSchema = z.object({ }) export const POST = withRouteHandler(async (request: NextRequest) => { - const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly() - if (!isAuthenticated || !userId) { + const auth = checkInternalApiKey(request) + if (!auth.success) { return createUnauthorizedResponse() } diff --git a/apps/sim/app/api/copilot/training/route.ts b/apps/sim/app/api/copilot/training/route.ts index 1c1e64ab0e9..637928b23a9 100644 --- a/apps/sim/app/api/copilot/training/route.ts +++ b/apps/sim/app/api/copilot/training/route.ts @@ -1,10 +1,7 @@ import { createLogger } from '@sim/logger' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' -import { - authenticateCopilotRequestSessionOnly, - createUnauthorizedResponse, -} from '@/lib/copilot/request/http' +import { checkInternalApiKey, createUnauthorizedResponse } from '@/lib/copilot/request/http' import { env } from '@/lib/core/config/env' import { withRouteHandler } from 
'@/lib/core/utils/with-route-handler' @@ -27,8 +24,8 @@ const TrainingDataSchema = z.object({ }) export const POST = withRouteHandler(async (request: NextRequest) => { - const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly() - if (!isAuthenticated || !userId) { + const auth = checkInternalApiKey(request) + if (!auth.success) { return createUnauthorizedResponse() } diff --git a/apps/sim/app/api/environment/route.ts b/apps/sim/app/api/environment/route.ts index 7d74c421262..9bd5e3d41c1 100644 --- a/apps/sim/app/api/environment/route.ts +++ b/apps/sim/app/api/environment/route.ts @@ -120,17 +120,22 @@ export const GET = withRouteHandler(async (request: Request) => { } const encryptedVariables = result[0].variables as Record - const decryptedVariables: Record = {} - - for (const [key, encryptedValue] of Object.entries(encryptedVariables)) { - try { - const { decrypted } = await decryptSecret(encryptedValue) - decryptedVariables[key] = { key, value: decrypted } - } catch (error) { - logger.error(`[${requestId}] Error decrypting variable ${key}`, error) - decryptedVariables[key] = { key, value: '' } - } - } + + const decryptedEntries = await Promise.all( + Object.entries(encryptedVariables).map(async ([key, encryptedValue]) => { + try { + const { decrypted } = await decryptSecret(encryptedValue) + return [key, { key, value: decrypted }] as const + } catch (error) { + logger.error(`[${requestId}] Error decrypting variable ${key}`, error) + return [key, { key, value: '' }] as const + } + }) + ) + const decryptedVariables = Object.fromEntries(decryptedEntries) as Record< + string, + EnvironmentVariable + > return NextResponse.json({ data: decryptedVariables }, { status: 200 }) } catch (error: any) { diff --git a/apps/sim/app/api/function/execute/route.test.ts b/apps/sim/app/api/function/execute/route.test.ts index 1176523c1c5..8b53c5eb057 100644 --- a/apps/sim/app/api/function/execute/route.test.ts +++ 
b/apps/sim/app/api/function/execute/route.test.ts @@ -191,7 +191,7 @@ describe('Function Execute API Route', () => { const response = await POST(req) const data = await response.json() - if (response.status === 500) { + if (response.status === 422 || response.status === 500) { expect(data.success).toBe(false) } else { const result = data.output?.result @@ -504,7 +504,7 @@ describe('Function Execute API Route', () => { const response = await POST(req) const data = await response.json() - expect(response.status).toBe(500) + expect(response.status).toBe(422) expect(data.success).toBe(false) expect(data.error).toBeTruthy() }) @@ -518,7 +518,7 @@ describe('Function Execute API Route', () => { const response = await POST(req) const data = await response.json() - expect(response.status).toBe(500) + expect(response.status).toBe(422) expect(data.success).toBe(false) expect(data.error).toContain('Type Error') expect(data.error).toContain('Cannot read properties of null') @@ -533,7 +533,7 @@ describe('Function Execute API Route', () => { const response = await POST(req) const data = await response.json() - expect(response.status).toBe(500) + expect(response.status).toBe(422) expect(data.success).toBe(false) expect(data.error).toContain('Reference Error') expect(data.error).toContain('undefinedVariable is not defined') @@ -548,7 +548,7 @@ describe('Function Execute API Route', () => { const response = await POST(req) const data = await response.json() - expect(response.status).toBe(500) + expect(response.status).toBe(422) expect(data.success).toBe(false) expect(data.error).toContain('Custom error message') }) @@ -562,7 +562,7 @@ describe('Function Execute API Route', () => { const response = await POST(req) const data = await response.json() - expect(response.status).toBe(500) + expect(response.status).toBe(422) expect(data.success).toBe(false) expect(data.error).toBeTruthy() }) diff --git a/apps/sim/app/api/function/execute/route.ts 
b/apps/sim/app/api/function/execute/route.ts index 63dfbff136b..680a1d158c0 100644 --- a/apps/sim/app/api/function/execute/route.ts +++ b/apps/sim/app/api/function/execute/route.ts @@ -1088,9 +1088,12 @@ export const POST = withRouteHandler(async (req: NextRequest) => { const executionTime = Date.now() - startTime if (isolatedResult.error) { - logger.error(`[${requestId}] Function execution failed in isolated-vm`, { + const isSystemError = isolatedResult.error.isSystemError === true + const logFn = isSystemError ? logger.error.bind(logger) : logger.warn.bind(logger) + logFn(`[${requestId}] Function execution failed in isolated-vm`, { error: isolatedResult.error, executionTime, + isSystemError, }) const ivmError = isolatedResult.error @@ -1119,7 +1122,8 @@ export const POST = withRouteHandler(async (req: NextRequest) => { resolvedCode ) - logger.error(`[${requestId}] Enhanced error details`, { + const detailLogFn = isSystemError ? logger.error.bind(logger) : logger.warn.bind(logger) + detailLogFn(`[${requestId}] Enhanced error details`, { originalMessage: ivmError.message, enhancedMessage: userFriendlyErrorMessage, line: enhancedError.line, @@ -1145,7 +1149,7 @@ export const POST = withRouteHandler(async (req: NextRequest) => { stack: enhancedError.stack, }, }, - { status: 500 } + { status: isSystemError ? 
500 : 422 } ) } diff --git a/apps/sim/app/api/invitations/[id]/resend/route.ts b/apps/sim/app/api/invitations/[id]/resend/route.ts index 1841f93118a..99c0721844d 100644 --- a/apps/sim/app/api/invitations/[id]/resend/route.ts +++ b/apps/sim/app/api/invitations/[id]/resend/route.ts @@ -146,6 +146,7 @@ export const POST = withRouteHandler( targetEmail: inv.email, targetRole: inv.role, kind: inv.kind, + membershipIntent: inv.membershipIntent, }, request, }) diff --git a/apps/sim/app/api/invitations/[id]/route.ts b/apps/sim/app/api/invitations/[id]/route.ts index 8e08cfc89dc..532a3c2cbb6 100644 --- a/apps/sim/app/api/invitations/[id]/route.ts +++ b/apps/sim/app/api/invitations/[id]/route.ts @@ -54,6 +54,7 @@ export const GET = withRouteHandler( email: inv.email, organizationId: inv.organizationId, organizationName: inv.organizationName, + membershipIntent: inv.membershipIntent, role: inv.role, status: inv.status, expiresAt: inv.expiresAt, @@ -121,6 +122,12 @@ export const PATCH = withRouteHandler( const { role, grants } = parsed.data if (role !== undefined) { + if (inv.membershipIntent === 'external') { + return NextResponse.json( + { error: 'Role updates are not valid on external workspace invitations' }, + { status: 400 } + ) + } if (!inv.organizationId) { return NextResponse.json( { error: 'Role updates are only valid on organization-scoped invitations' }, @@ -187,6 +194,7 @@ export const PATCH = withRouteHandler( invitationId: id, targetEmail: inv.email, kind: inv.kind, + membershipIntent: inv.membershipIntent, roleUpdate: role ?? 
null, grantUpdates: grantsToApply, }, diff --git a/apps/sim/app/api/mcp/copilot/route.ts b/apps/sim/app/api/mcp/copilot/route.ts index 6ae73c4126d..93e24c23086 100644 --- a/apps/sim/app/api/mcp/copilot/route.ts +++ b/apps/sim/app/api/mcp/copilot/route.ts @@ -1,5 +1,5 @@ import { Server } from '@modelcontextprotocol/sdk/server/index.js' -import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js' +import { WebStandardStreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/webStandardStreamableHttp.js' import { CallToolRequestSchema, type CallToolResult, @@ -166,16 +166,6 @@ function createError(id: RequestId, code: ErrorCode | number, message: string): } } -function normalizeRequestHeaders(request: NextRequest): HeaderMap { - const headers: HeaderMap = {} - - request.headers.forEach((value, key) => { - headers[key.toLowerCase()] = value - }) - - return headers -} - function readHeader(headers: HeaderMap | undefined, name: string): string | undefined { if (!headers) return undefined const value = headers[name.toLowerCase()] @@ -185,190 +175,6 @@ function readHeader(headers: HeaderMap | undefined, name: string): string | unde return value } -class NextResponseCapture { - private _status = 200 - private _headers = new Headers() - private _controller: ReadableStreamDefaultController | null = null - private _pendingChunks: Uint8Array[] = [] - private _closeHandlers: Array<() => void> = [] - private _errorHandlers: Array<(error: Error) => void> = [] - private _headersWritten = false - private _ended = false - private _headersPromise: Promise - private _resolveHeaders: (() => void) | null = null - private _endedPromise: Promise - private _resolveEnded: (() => void) | null = null - readonly readable: ReadableStream - - constructor() { - this._headersPromise = new Promise((resolve) => { - this._resolveHeaders = resolve - }) - - this._endedPromise = new Promise((resolve) => { - this._resolveEnded = resolve - }) - - 
this.readable = new ReadableStream({ - start: (controller) => { - this._controller = controller - if (this._pendingChunks.length > 0) { - for (const chunk of this._pendingChunks) { - controller.enqueue(chunk) - } - this._pendingChunks = [] - } - }, - cancel: () => { - this._ended = true - this._resolveEnded?.() - this.triggerCloseHandlers() - }, - }) - } - - private markHeadersWritten(): void { - if (this._headersWritten) return - this._headersWritten = true - this._resolveHeaders?.() - } - - private triggerCloseHandlers(): void { - for (const handler of this._closeHandlers) { - try { - handler() - } catch (error) { - this.triggerErrorHandlers(toError(error)) - } - } - } - - private triggerErrorHandlers(error: Error): void { - for (const errorHandler of this._errorHandlers) { - errorHandler(error) - } - } - - private normalizeChunk(chunk: unknown): Uint8Array | null { - if (typeof chunk === 'string') { - return new TextEncoder().encode(chunk) - } - - if (chunk instanceof Uint8Array) { - return chunk - } - - if (chunk === undefined || chunk === null) { - return null - } - - return new TextEncoder().encode(String(chunk)) - } - - writeHead(status: number, headers?: Record): this { - this._status = status - - if (headers) { - Object.entries(headers).forEach(([key, value]) => { - if (Array.isArray(value)) { - this._headers.set(key, value.join(', ')) - } else { - this._headers.set(key, String(value)) - } - }) - } - - this.markHeadersWritten() - return this - } - - flushHeaders(): this { - this.markHeadersWritten() - return this - } - - write(chunk: unknown): boolean { - const normalized = this.normalizeChunk(chunk) - if (!normalized) return true - - this.markHeadersWritten() - - if (this._controller) { - try { - this._controller.enqueue(normalized) - } catch (error) { - this.triggerErrorHandlers(toError(error)) - } - } else { - this._pendingChunks.push(normalized) - } - - return true - } - - end(chunk?: unknown): this { - if (chunk !== undefined) this.write(chunk) - 
this.markHeadersWritten() - if (this._ended) return this - - this._ended = true - this._resolveEnded?.() - - if (this._controller) { - try { - this._controller.close() - } catch (error) { - this.triggerErrorHandlers(toError(error)) - } - } - - this.triggerCloseHandlers() - - return this - } - - async waitForHeaders(timeoutMs = 30000): Promise { - if (this._headersWritten) return - - await Promise.race([ - this._headersPromise, - new Promise((resolve) => { - setTimeout(resolve, timeoutMs) - }), - ]) - } - - async waitForEnd(timeoutMs = 30000): Promise { - if (this._ended) return - - await Promise.race([ - this._endedPromise, - new Promise((resolve) => { - setTimeout(resolve, timeoutMs) - }), - ]) - } - - on(event: 'close' | 'error', handler: (() => void) | ((error: Error) => void)): this { - if (event === 'close') { - this._closeHandlers.push(handler as () => void) - } - - if (event === 'error') { - this._errorHandlers.push(handler as (error: Error) => void) - } - - return this - } - - toNextResponse(): NextResponse { - return new NextResponse(this.readable, { - status: this._status, - headers: this._headers, - }) - } -} - function buildMcpServer(abortSignal?: AbortSignal): Server { const server = new Server( { @@ -503,29 +309,17 @@ function buildMcpServer(abortSignal?: AbortSignal): Server { async function handleMcpRequestWithSdk( request: NextRequest, parsedBody: unknown -): Promise { +): Promise { const server = buildMcpServer(request.signal) - const transport = new StreamableHTTPServerTransport({ + const transport = new WebStandardStreamableHTTPServerTransport({ sessionIdGenerator: undefined, enableJsonResponse: true, }) - const responseCapture = new NextResponseCapture() - const requestAdapter = { - method: request.method, - headers: normalizeRequestHeaders(request), - } - await server.connect(transport) try { - await transport.handleRequest(requestAdapter as any, responseCapture as any, parsedBody) - await responseCapture.waitForHeaders() - // Must exceed the 
longest possible tool execution. - // Using ORCHESTRATION_TIMEOUT_MS + 60 s buffer so the orchestrator can - // finish or time-out on its own before the transport is torn down. - await responseCapture.waitForEnd(ORCHESTRATION_TIMEOUT_MS + 60_000) - return responseCapture.toNextResponse() + return await transport.handleRequest(request, { parsedBody }) } finally { await server.close().catch(() => {}) await transport.close().catch(() => {}) @@ -567,6 +361,13 @@ export const POST = withRouteHandler(async (request: NextRequest) => { return await handleMcpRequestWithSdk(request, parsedBody) } catch (error) { + if (request.signal.aborted || (error as Error)?.name === 'AbortError') { + return NextResponse.json( + createError(0, ErrorCode.ConnectionClosed, 'Client cancelled request'), + { status: 499 } + ) + } + logger.error('Error handling MCP request', { error }) return NextResponse.json(createError(0, ErrorCode.InternalError, 'Internal error'), { status: 500, diff --git a/apps/sim/app/api/organizations/[id]/members/[memberId]/route.ts b/apps/sim/app/api/organizations/[id]/members/[memberId]/route.ts index 971b1e57c79..f672686ac7d 100644 --- a/apps/sim/app/api/organizations/[id]/members/[memberId]/route.ts +++ b/apps/sim/app/api/organizations/[id]/members/[memberId]/route.ts @@ -8,7 +8,11 @@ import { z } from 'zod' import { getSession } from '@/lib/auth' import { setActiveOrganizationForCurrentSession } from '@/lib/auth/active-organization' import { getUserUsageData } from '@/lib/billing/core/usage' -import { removeUserFromOrganization } from '@/lib/billing/organizations/membership' +import { + removeExternalUserFromOrganizationWorkspaces, + removeUserFromOrganization, +} from '@/lib/billing/organizations/membership' +import { reduceOrganizationSeatsByOne } from '@/lib/billing/organizations/seats' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' const logger = createLogger('OrganizationMemberAPI') @@ -282,6 +286,7 @@ export const DELETE = 
withRouteHandler( } const { id: organizationId, memberId: targetUserId } = await params + const shouldReduceSeats = request.nextUrl.searchParams.get('shouldReduceSeats') === 'true' const userMember = await db .select() @@ -311,7 +316,79 @@ export const DELETE = withRouteHandler( .limit(1) if (targetMember.length === 0) { - return NextResponse.json({ error: 'Member not found' }, { status: 404 }) + const [targetUser] = await db + .select({ id: user.id, email: user.email, name: user.name }) + .from(user) + .where(eq(user.id, targetUserId)) + .limit(1) + + if (!targetUser) { + return NextResponse.json({ error: 'Member not found' }, { status: 404 }) + } + + const externalResult = await removeExternalUserFromOrganizationWorkspaces({ + userId: targetUserId, + organizationId, + }) + + if (!externalResult.success) { + const error = externalResult.error || 'External workspace member not found' + const status = + error === 'External workspace member not found' + ? 404 + : error === 'User is an organization member' + ? 409 + : 500 + + return NextResponse.json({ error }, { status }) + } + + logger.info('External workspace member removed from organization workspaces', { + organizationId, + removedMemberId: targetUserId, + removedBy: session.user.id, + workspaceAccessRevoked: externalResult.workspaceAccessRevoked, + permissionGroupsRevoked: externalResult.permissionGroupsRevoked, + credentialMembershipsRevoked: externalResult.credentialMembershipsRevoked, + pendingInvitationsCancelled: externalResult.pendingInvitationsCancelled, + }) + + recordAudit({ + workspaceId: null, + actorId: session.user.id, + action: AuditAction.ORG_MEMBER_REMOVED, + resourceType: AuditResourceType.ORGANIZATION, + resourceId: organizationId, + actorName: session.user.name ?? undefined, + actorEmail: session.user.email ?? undefined, + description: `Removed external workspace member ${targetUserId} from organization`, + metadata: { + targetUserId, + targetEmail: targetUser.email ?? 
undefined, + targetName: targetUser.name ?? undefined, + membershipType: 'external', + workspaceAccessRevoked: externalResult.workspaceAccessRevoked, + permissionGroupsRevoked: externalResult.permissionGroupsRevoked, + credentialMembershipsRevoked: externalResult.credentialMembershipsRevoked, + pendingInvitationsCancelled: externalResult.pendingInvitationsCancelled, + }, + request, + }) + + return NextResponse.json({ + success: true, + message: 'External member removed successfully', + data: { + removedMemberId: targetUserId, + removedBy: session.user.id, + removedAt: new Date().toISOString(), + membershipType: 'external', + workspaceAccessRevoked: externalResult.workspaceAccessRevoked, + permissionGroupsRevoked: externalResult.permissionGroupsRevoked, + credentialMembershipsRevoked: externalResult.credentialMembershipsRevoked, + pendingInvitationsCancelled: externalResult.pendingInvitationsCancelled, + }, + }) } const result = await removeUserFromOrganization({ @@ -330,6 +407,28 @@ export const DELETE = withRouteHandler( return NextResponse.json({ error: result.error }, { status: 500 }) } + let seatReduction: Awaited> | null = null + if (shouldReduceSeats && session.user.id !== targetUserId) { + try { + seatReduction = await reduceOrganizationSeatsByOne({ + organizationId, + actorUserId: session.user.id, + removedUserId: targetUserId, + }) + } catch (seatError) { + logger.error('Failed to reduce seats after member removal', { + organizationId, + removedMemberId: targetUserId, + removedBy: session.user.id, + error: seatError, + }) + seatReduction = { + reduced: false, + reason: 'Failed to reduce seats after member removal', + } + } + } + if (session.user.id === targetUserId) { try { await setActiveOrganizationForCurrentSession(null) @@ -348,6 +447,7 @@ export const DELETE = withRouteHandler( removedBy: session.user.id, wasSelfRemoval: session.user.id === targetUserId, billingActions: result.billingActions, + seatReduction, }) recordAudit({ @@ -367,6 +467,7 @@ 
export const DELETE = withRouteHandler( targetEmail: targetMember[0].email ?? undefined, targetName: targetMember[0].name ?? undefined, wasSelfRemoval: session.user.id === targetUserId, + seatReduction, }, request, }) @@ -381,6 +482,7 @@ export const DELETE = withRouteHandler( removedMemberId: targetUserId, removedBy: session.user.id, removedAt: new Date().toISOString(), + seatReduction, }, }) } catch (error) { diff --git a/apps/sim/app/api/organizations/[id]/roster/route.ts b/apps/sim/app/api/organizations/[id]/roster/route.ts index c1abe4d6d15..86229747db7 100644 --- a/apps/sim/app/api/organizations/[id]/roster/route.ts +++ b/apps/sim/app/api/organizations/[id]/roster/route.ts @@ -8,7 +8,7 @@ import { workspace, } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq, inArray, sql } from 'drizzle-orm' +import { and, eq, inArray, isNull, sql } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { getSession } from '@/lib/auth' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' @@ -57,7 +57,7 @@ export const GET = withRouteHandler( const orgWorkspaces = await db .select({ id: workspace.id, name: workspace.name }) .from(workspace) - .where(eq(workspace.organizationId, organizationId)) + .where(and(eq(workspace.organizationId, organizationId), isNull(workspace.archivedAt))) const orgWorkspaceIds = orgWorkspaces.map((ws) => ws.id) const workspaceNameById = new Map(orgWorkspaces.map((ws) => [ws.id, ws.name])) @@ -118,12 +118,82 @@ export const GET = withRouteHandler( workspaces: permissionsByUser.get(row.userId) ?? [], })) + const externalPermissionRows = + orgWorkspaceIds.length > 0 + ? 
await db + .select({ + userId: user.id, + userName: user.name, + userEmail: user.email, + userImage: user.image, + workspaceId: permissions.entityId, + permission: permissions.permissionType, + createdAt: permissions.createdAt, + }) + .from(permissions) + .innerJoin(user, eq(permissions.userId, user.id)) + .leftJoin( + member, + and(eq(member.userId, user.id), eq(member.organizationId, organizationId)) + ) + .where( + and( + eq(permissions.entityType, 'workspace'), + inArray(permissions.entityId, orgWorkspaceIds), + isNull(member.id) + ) + ) + : [] + + const externalMembersByUser = new Map< + string, + { + memberId: string + userId: string + role: 'external' + createdAt: Date + name: string + email: string + image: string | null + workspaces: RosterWorkspaceAccess[] + } + >() + + for (const row of externalPermissionRows) { + const existing = externalMembersByUser.get(row.userId) + const workspaceAccess: RosterWorkspaceAccess = { + workspaceId: row.workspaceId, + workspaceName: workspaceNameById.get(row.workspaceId) ?? 
'Workspace', + permission: row.permission, + } + + if (existing) { + existing.workspaces.push(workspaceAccess) + if (row.createdAt < existing.createdAt) existing.createdAt = row.createdAt + continue + } + + externalMembersByUser.set(row.userId, { + memberId: `external-${row.userId}`, + userId: row.userId, + role: 'external', + createdAt: row.createdAt, + name: row.userName, + email: row.userEmail, + image: row.userImage, + workspaces: [workspaceAccess], + }) + } + + const rosterMembers = [...members, ...externalMembersByUser.values()] + const pendingInvitationRows = await db .select({ id: invitation.id, email: invitation.email, role: invitation.role, kind: invitation.kind, + membershipIntent: invitation.membershipIntent, createdAt: invitation.createdAt, expiresAt: invitation.expiresAt, inviteeName: user.name, @@ -160,8 +230,9 @@ export const GET = withRouteHandler( const pendingInvitations = pendingInvitationRows.map((row) => ({ id: row.id, email: row.email, - role: row.role, + role: row.membershipIntent === 'external' ? 
'external' : row.role, kind: row.kind, + membershipIntent: row.membershipIntent, createdAt: row.createdAt, expiresAt: row.expiresAt, inviteeName: row.inviteeName, @@ -172,7 +243,7 @@ export const GET = withRouteHandler( return NextResponse.json({ success: true, data: { - members, + members: rosterMembers, pendingInvitations, workspaces: orgWorkspaces, }, diff --git a/apps/sim/app/api/organizations/[id]/seats/route.ts b/apps/sim/app/api/organizations/[id]/seats/route.ts index 0cfba281c62..cce91dfc8a8 100644 --- a/apps/sim/app/api/organizations/[id]/seats/route.ts +++ b/apps/sim/app/api/organizations/[id]/seats/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { invitation, member, organization, subscription } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, count, eq, inArray } from 'drizzle-orm' +import { and, count, eq, gt, inArray, ne } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { getSession } from '@/lib/auth' @@ -116,7 +116,14 @@ export const PUT = withRouteHandler( const [pendingCountRow] = await db .select({ count: count() }) .from(invitation) - .where(and(eq(invitation.organizationId, organizationId), eq(invitation.status, 'pending'))) + .where( + and( + eq(invitation.organizationId, organizationId), + eq(invitation.status, 'pending'), + ne(invitation.membershipIntent, 'external'), + gt(invitation.expiresAt, new Date()) + ) + ) const memberCount = memberCountRow?.count ?? 0 const pendingCount = pendingCountRow?.count ?? 
0 diff --git a/apps/sim/app/api/table/[tableId]/rows/route.ts b/apps/sim/app/api/table/[tableId]/rows/route.ts index 99e467a20c6..a3db48c875e 100644 --- a/apps/sim/app/api/table/[tableId]/rows/route.ts +++ b/apps/sim/app/api/table/[tableId]/rows/route.ts @@ -232,7 +232,7 @@ export const POST = withRouteHandler( 'rows' in body && Array.isArray((body as Record).rows) ) { - return handleBatchInsert( + return await handleBatchInsert( requestId, tableId, body as z.infer, diff --git a/apps/sim/app/api/tools/sap_s4hana/proxy/route.ts b/apps/sim/app/api/tools/sap_s4hana/proxy/route.ts new file mode 100644 index 00000000000..3ca70fb7f42 --- /dev/null +++ b/apps/sim/app/api/tools/sap_s4hana/proxy/route.ts @@ -0,0 +1,614 @@ +import { createHash } from 'node:crypto' +import { createLogger } from '@sim/logger' +import { toError } from '@sim/utils/errors' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { withRouteHandler } from '@/lib/core/utils/with-route-handler' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('SapS4HanaProxyAPI') + +const HttpMethod = z.enum(['GET', 'POST', 'PATCH', 'PUT', 'DELETE', 'MERGE']) +const DeploymentType = z.enum(['cloud_public', 'cloud_private', 'on_premise']) +const AuthType = z.enum(['oauth_client_credentials', 'basic']) + +const ServiceName = z + .string() + .min(1, 'service is required') + .regex( + /^[A-Z][A-Z0-9_]*(;v=\d+)?$/, + 'service must be an uppercase OData service name optionally suffixed with ";v=NNNN" (e.g., API_BUSINESS_PARTNER, API_OUTBOUND_DELIVERY_SRV;v=0002)' + ) + +const ServicePath = z + .string() + .min(1, 'path is required') + .refine( + (p) => + !p.split(/[/\\]/).some((seg) => seg === '..' || seg === '.') && + !p.includes('?') && + !p.includes('#') && + !/%(?:2[eEfF]|5[cC]|3[fF]|23)/.test(p), + { + message: + 'path must not contain ".." 
or "." segments, "?", "#", or percent-encoded path/query/fragment characters', + } + ) + +const Subdomain = z + .string() + .regex( + /^[a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?$/i, + 'subdomain must contain only letters, digits, and hyphens (1-63 chars)' + ) + +const ProxyRequestSchema = z + .object({ + deploymentType: DeploymentType.default('cloud_public'), + authType: AuthType.default('oauth_client_credentials'), + subdomain: Subdomain.optional(), + region: z + .string() + .regex(/^[a-z]{2,4}\d{1,3}$/i, 'region must be an SAP BTP region code (e.g., eu10, us30)') + .optional(), + baseUrl: z.string().optional(), + tokenUrl: z.string().optional(), + clientId: z.string().optional(), + clientSecret: z.string().optional(), + username: z.string().optional(), + password: z.string().optional(), + service: ServiceName, + path: ServicePath, + method: HttpMethod.default('GET'), + query: z.record(z.union([z.string(), z.number(), z.boolean()])).optional(), + body: z.unknown().optional(), + ifMatch: z.string().optional(), + }) + .superRefine((req, ctx) => { + if (req.deploymentType === 'cloud_public') { + if (!req.subdomain) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ['subdomain'], + message: 'subdomain is required for cloud_public deployment', + }) + } + if (!req.region) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ['region'], + message: 'region is required for cloud_public deployment', + }) + } + if (req.authType !== 'oauth_client_credentials') { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ['authType'], + message: 'cloud_public deployment only supports oauth_client_credentials', + }) + } + if (!req.clientId) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ['clientId'], + message: 'clientId is required', + }) + } + if (!req.clientSecret) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ['clientSecret'], + message: 'clientSecret is required', + }) + } + } else { + if (!req.baseUrl) { + ctx.addIssue({ + code: 
z.ZodIssueCode.custom, + path: ['baseUrl'], + message: 'baseUrl is required for cloud_private and on_premise deployments', + }) + } else { + const baseUrlCheck = checkExternalUrlSafety(req.baseUrl, 'baseUrl') + if (!baseUrlCheck.ok) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ['baseUrl'], + message: baseUrlCheck.message, + }) + } + } + if (req.authType === 'oauth_client_credentials') { + if (!req.tokenUrl) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ['tokenUrl'], + message: 'tokenUrl is required for OAuth on cloud_private/on_premise', + }) + } else { + const tokenUrlCheck = checkExternalUrlSafety(req.tokenUrl, 'tokenUrl') + if (!tokenUrlCheck.ok) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ['tokenUrl'], + message: tokenUrlCheck.message, + }) + } + } + if (!req.clientId) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ['clientId'], + message: 'clientId is required for OAuth', + }) + } + if (!req.clientSecret) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ['clientSecret'], + message: 'clientSecret is required for OAuth', + }) + } + } else { + if (!req.username) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ['username'], + message: 'username is required for Basic auth', + }) + } + if (!req.password) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ['password'], + message: 'password is required for Basic auth', + }) + } + } + } + }) + +type ProxyRequest = z.infer + +interface CachedToken { + accessToken: string + expiresAt: number +} + +const TOKEN_CACHE = new Map() +const TOKEN_CACHE_MAX_ENTRIES = 500 +const TOKEN_SAFETY_WINDOW_MS = 60_000 +const OUTBOUND_FETCH_TIMEOUT_MS = 30_000 + +const FORBIDDEN_HOSTS = new Set([ + 'localhost', + '0.0.0.0', + '127.0.0.1', + '169.254.169.254', + 'metadata.google.internal', + 'metadata', + '[::1]', + '[::]', + '[::ffff:127.0.0.1]', + '[fd00:ec2::254]', +]) + +function isPrivateIPv4(host: string): boolean { + const match = 
host.match(/^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/) + if (!match) return false + const octets = match.slice(1, 5).map(Number) as [number, number, number, number] + if (octets.some((o) => o < 0 || o > 255)) return false + const [a, b] = octets + if (a === 10) return true + if (a === 172 && b >= 16 && b <= 31) return true + if (a === 192 && b === 168) return true + if (a === 127) return true + if (a === 169 && b === 254) return true + if (a === 0) return true + return false +} + +function extractIPv4MappedHost(host: string): string | null { + const stripped = host.startsWith('[') && host.endsWith(']') ? host.slice(1, -1) : host + const lower = stripped.toLowerCase() + for (const prefix of ['::ffff:', '::']) { + if (lower.startsWith(prefix)) { + const candidate = lower.slice(prefix.length) + if (/^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/.test(candidate)) return candidate + } + } + const hexMatch = lower.match(/^::ffff:([0-9a-f]{1,4}):([0-9a-f]{1,4})$/) + if (hexMatch) { + const high = Number.parseInt(hexMatch[1] as string, 16) + const low = Number.parseInt(hexMatch[2] as string, 16) + if (high >= 0 && high <= 0xffff && low >= 0 && low <= 0xffff) { + const a = (high >> 8) & 0xff + const b = high & 0xff + const c = (low >> 8) & 0xff + const d = low & 0xff + return `${a}.${b}.${c}.${d}` + } + } + return null +} + +function isPrivateOrLoopbackIPv6(host: string): boolean { + const stripped = host.startsWith('[') && host.endsWith(']') ? 
host.slice(1, -1) : host + const lower = stripped.toLowerCase() + if (lower === '::' || lower === '::1') return true + if (/^fc[0-9a-f]{2}:/.test(lower) || /^fd[0-9a-f]{2}:/.test(lower)) return true + if (lower.startsWith('fe80:')) return true + return false +} + +function checkExternalUrlSafety( + rawUrl: string, + label: string +): { ok: true; url: URL } | { ok: false; message: string } { + let parsed: URL + try { + parsed = new URL(rawUrl) + } catch { + return { ok: false, message: `${label} must be a valid URL` } + } + if (parsed.protocol !== 'https:') { + return { ok: false, message: `${label} must use https://` } + } + const host = parsed.hostname.toLowerCase() + if (FORBIDDEN_HOSTS.has(host) || FORBIDDEN_HOSTS.has(`[${host}]`)) { + return { ok: false, message: `${label} host is not allowed` } + } + if (isPrivateIPv4(host)) { + return { ok: false, message: `${label} host is not allowed (private/loopback range)` } + } + const mapped = extractIPv4MappedHost(host) + if (mapped && isPrivateIPv4(mapped)) { + return { ok: false, message: `${label} host is not allowed (IPv4-mapped private range)` } + } + if (isPrivateOrLoopbackIPv6(host)) { + return { ok: false, message: `${label} host is not allowed (IPv6 private/loopback)` } + } + return { ok: true, url: parsed } +} + +function assertSafeExternalUrl(rawUrl: string, label: string): URL { + const result = checkExternalUrlSafety(rawUrl, label) + if (!result.ok) throw new Error(result.message) + return result.url +} + +function resolveTokenUrl(req: ProxyRequest): string { + if (req.deploymentType === 'cloud_public') { + return `https://${req.subdomain}.authentication.${req.region}.hana.ondemand.com/oauth/token` + } + if (!req.tokenUrl) { + throw new Error('tokenUrl is required for OAuth on cloud_private/on_premise') + } + return req.tokenUrl +} + +function tokenCacheKey(req: ProxyRequest): string { + const secretHash = req.clientSecret + ? 
createHash('sha256').update(req.clientSecret).digest('hex').slice(0, 16) + : '' + return `${resolveTokenUrl(req)}::${req.clientId ?? ''}::${secretHash}` +} + +function rememberToken(key: string, token: CachedToken): void { + if (TOKEN_CACHE.has(key)) TOKEN_CACHE.delete(key) + TOKEN_CACHE.set(key, token) + while (TOKEN_CACHE.size > TOKEN_CACHE_MAX_ENTRIES) { + const oldestKey = TOKEN_CACHE.keys().next().value + if (oldestKey === undefined) break + TOKEN_CACHE.delete(oldestKey) + } +} + +async function fetchAccessToken(req: ProxyRequest, requestId: string): Promise { + const cacheKey = tokenCacheKey(req) + const cached = TOKEN_CACHE.get(cacheKey) + if (cached && cached.expiresAt - TOKEN_SAFETY_WINDOW_MS > Date.now()) { + return cached.accessToken + } + + const tokenUrl = assertSafeExternalUrl(resolveTokenUrl(req), 'tokenUrl').toString() + const basic = Buffer.from(`${req.clientId}:${req.clientSecret}`).toString('base64') + + const response = await fetch(tokenUrl, { + method: 'POST', + headers: { + Authorization: `Basic ${basic}`, + 'Content-Type': 'application/x-www-form-urlencoded', + Accept: 'application/json', + }, + body: 'grant_type=client_credentials', + signal: AbortSignal.timeout(OUTBOUND_FETCH_TIMEOUT_MS), + }) + + if (!response.ok) { + const text = await response.text().catch(() => '') + logger.warn(`[${requestId}] Token fetch failed (${response.status}): ${text}`) + throw new Error(`SAP token request failed: HTTP ${response.status}`) + } + + const data = (await response.json()) as { + access_token?: string + expires_in?: number + } + + if (!data.access_token) { + throw new Error('SAP token response missing access_token') + } + + const expiresInMs = (data.expires_in ?? 
3600) * 1000 + rememberToken(cacheKey, { + accessToken: data.access_token, + expiresAt: Date.now() + expiresInMs, + }) + return data.access_token +} + +interface CsrfBundle { + token: string + cookie: string +} + +function joinSetCookies(headers: Headers): string { + const cookies = + typeof (headers as { getSetCookie?: () => string[] }).getSetCookie === 'function' + ? (headers as { getSetCookie: () => string[] }).getSetCookie() + : (headers.get('set-cookie') ?? '').split(/,\s*(?=[^=,;\s]+=)/) + return cookies + .map((c) => c.split(';')[0]?.trim()) + .filter(Boolean) + .join('; ') +} + +function buildAuthHeader(req: ProxyRequest, accessToken: string | null): string { + if (req.authType === 'basic') { + const basic = Buffer.from(`${req.username}:${req.password}`).toString('base64') + return `Basic ${basic}` + } + return `Bearer ${accessToken}` +} + +async function fetchCsrf( + req: ProxyRequest, + accessToken: string | null, + requestId: string +): Promise { + const url = buildOdataUrl(req, '/$metadata') + const response = await fetch(url, { + method: 'GET', + headers: { + Authorization: buildAuthHeader(req, accessToken), + Accept: 'application/xml', + 'X-CSRF-Token': 'Fetch', + }, + signal: AbortSignal.timeout(OUTBOUND_FETCH_TIMEOUT_MS), + }) + + if (!response.ok) { + const text = await response.text().catch(() => '') + logger.warn(`[${requestId}] CSRF fetch failed (${response.status}): ${text}`) + return null + } + + const token = response.headers.get('x-csrf-token') + const cookie = joinSetCookies(response.headers) + if (!token) return null + return { token, cookie } +} + +function resolveHost(req: ProxyRequest): string { + if (req.deploymentType === 'cloud_public') { + const constructed = `https://${req.subdomain}-api.s4hana.ondemand.com` + return assertSafeExternalUrl(constructed, 'subdomain').toString().replace(/\/+$/, '') + } + if (!req.baseUrl) { + throw new Error('baseUrl is required for cloud_private and on_premise deployments') + } + const trimmed = 
req.baseUrl.replace(/\/+$/, '') + return assertSafeExternalUrl(trimmed, 'baseUrl').toString().replace(/\/+$/, '') +} + +function buildOdataUrl(req: ProxyRequest, pathOverride?: string): string { + const host = resolveHost(req) + const servicePath = `/sap/opu/odata/sap/${req.service}` + const subPath = pathOverride ?? req.path + const normalized = subPath.startsWith('/') ? subPath : `/${subPath}` + const base = `${host}${servicePath}${normalized}` + + if (pathOverride !== undefined) { + return base + } + if (!req.query || Object.keys(req.query).length === 0) { + return base + } + const encode = (s: string) => encodeURIComponent(s).replace(/%24/g, '$') + const parts: string[] = [] + for (const [key, value] of Object.entries(req.query)) { + if (value === undefined || value === null) continue + parts.push(`${encode(key)}=${encode(String(value))}`) + } + const queryString = parts.join('&') + if (!queryString) return base + return base.includes('?') ? `${base}&${queryString}` : `${base}?${queryString}` +} + +const WRITE_METHODS = new Set(['POST', 'PUT', 'PATCH', 'DELETE', 'MERGE']) + +interface OdataInvocation { + status: number + body: unknown + raw: string + csrfHeader: string +} + +async function callOdata( + req: ProxyRequest, + accessToken: string | null, + csrf: CsrfBundle | null +): Promise { + const url = buildOdataUrl(req) + const headers: Record = { + Authorization: buildAuthHeader(req, accessToken), + Accept: 'application/json', + } + + const isWrite = WRITE_METHODS.has(req.method) + const hasBody = req.body !== undefined && req.body !== null + if (hasBody) headers['Content-Type'] = 'application/json' + if (req.ifMatch) headers['If-Match'] = req.ifMatch + + if (isWrite && csrf) { + headers['X-CSRF-Token'] = csrf.token + if (csrf.cookie) headers.Cookie = csrf.cookie + } + + const response = await fetch(url, { + method: req.method, + headers, + body: hasBody ? 
JSON.stringify(req.body) : undefined, + signal: AbortSignal.timeout(OUTBOUND_FETCH_TIMEOUT_MS), + }) + + const raw = await response.text() + let parsed: unknown = null + if (raw.length > 0) { + try { + parsed = JSON.parse(raw) + } catch { + parsed = raw + } + } + + const csrfHeader = response.headers.get('x-csrf-token')?.toLowerCase() ?? '' + return { status: response.status, body: parsed, raw, csrfHeader } +} + +function isCsrfRequired(invocation: OdataInvocation): boolean { + if (invocation.status !== 403) return false + if (invocation.csrfHeader === 'required') return true + if (typeof invocation.body !== 'object' || invocation.body === null) return false + const errorObj = (invocation.body as { error?: { message?: { value?: string } | string } }).error + const messageField = errorObj?.message + const message = typeof messageField === 'string' ? messageField : (messageField?.value ?? '') + return message.toLowerCase().includes('csrf') +} + +function extractOdataError(body: unknown, status: number): string { + if (body && typeof body === 'object') { + const err = ( + body as { + error?: { + message?: { value?: string } | string + code?: string + innererror?: { + errordetails?: Array<{ code?: string; message?: string; severity?: string }> + } + } + } + ).error + if (err) { + const messageField = err.message + const base = + typeof messageField === 'string' ? messageField : (messageField?.value ?? err.code ?? '') + const prefix = err.code ? `[${err.code}] ` : '' + const details = err.innererror?.errordetails + ?.filter((d) => d.message && (!d.severity || d.severity.toLowerCase() !== 'info')) + .map((d) => { + const tag = d.code ? `[${d.code}] ` : '' + return `${tag}${d.message}` + }) + .filter((m): m is string => Boolean(m)) + if (details && details.length > 0) { + const extras = details.filter((d) => !d.endsWith(base)) + return extras.length > 0 ? 
`${prefix}${base} (${extras.join('; ')})` : `${prefix}${base}` + } + if (base) return `${prefix}${base}` + } + } + if (typeof body === 'string' && body.length > 0) return body + return `SAP request failed with HTTP ${status}` +} + +function unwrapOdata(body: unknown): unknown { + if (!body || typeof body !== 'object') return body + const root = (body as { d?: unknown }).d + if (root === undefined) return body + if (root && typeof root === 'object' && 'results' in (root as Record)) { + const rootObj = root as { results: unknown; __count?: string; __next?: string } + if (rootObj.__count !== undefined || rootObj.__next !== undefined) { + return { + results: rootObj.results, + ...(rootObj.__count !== undefined && { __count: rootObj.__count }), + ...(rootObj.__next !== undefined && { __next: rootObj.__next }), + } + } + return rootObj.results + } + return root +} + +export const POST = withRouteHandler(async (request: NextRequest) => { + const requestId = generateRequestId() + + try { + const authResult = await checkInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized SAP proxy request: ${authResult.error}`) + return NextResponse.json( + { success: false, error: authResult.error || 'Authentication required' }, + { status: 401 } + ) + } + + const json = await request.json() + const proxyReq = ProxyRequestSchema.parse(json) + const isWrite = WRITE_METHODS.has(proxyReq.method) + + const accessToken = + proxyReq.authType === 'oauth_client_credentials' + ? await fetchAccessToken(proxyReq, requestId) + : null + const csrf = isWrite ? 
await fetchCsrf(proxyReq, accessToken, requestId) : null + + let invocation = await callOdata(proxyReq, accessToken, csrf) + + if (isWrite && isCsrfRequired(invocation)) { + logger.info(`[${requestId}] CSRF token rejected, refetching and retrying`) + const refreshed = await fetchCsrf(proxyReq, accessToken, requestId) + if (refreshed) { + invocation = await callOdata(proxyReq, accessToken, refreshed) + } + } + + if (invocation.status >= 200 && invocation.status < 300) { + const data = invocation.status === 204 ? null : unwrapOdata(invocation.body) + return NextResponse.json({ success: true, output: { status: invocation.status, data } }) + } + + const message = extractOdataError(invocation.body, invocation.status) + logger.warn( + `[${requestId}] SAP API error (${invocation.status}) ${proxyReq.service}${proxyReq.path}: ${message}` + ) + return NextResponse.json( + { success: false, error: message, status: invocation.status }, + { status: invocation.status } + ) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Validation error:`, error.errors) + return NextResponse.json( + { success: false, error: error.errors[0]?.message || 'Validation failed' }, + { status: 400 } + ) + } + logger.error(`[${requestId}] Unexpected SAP proxy error:`, error) + return NextResponse.json({ success: false, error: toError(error).message }, { status: 500 }) + } +}) diff --git a/apps/sim/app/api/tools/stagehand/agent/route.ts b/apps/sim/app/api/tools/stagehand/agent/route.ts index afc32d5bc6a..3c17d60eeb4 100644 --- a/apps/sim/app/api/tools/stagehand/agent/route.ts +++ b/apps/sim/app/api/tools/stagehand/agent/route.ts @@ -22,6 +22,8 @@ const requestSchema = z.object({ variables: z.any(), provider: z.enum(['openai', 'anthropic']).optional().default('openai'), apiKey: z.string(), + mode: z.enum(['dom', 'hybrid', 'cua']).optional().default('dom'), + maxSteps: z.number().int().min(1).max(200).optional().default(20), }) /** @@ -121,7 +123,7 @@ export const POST 
= withRouteHandler(async (request: NextRequest) => { } const params = validationResult.data - const { task, startUrl: rawStartUrl, outputSchema, provider, apiKey } = params + const { task, startUrl: rawStartUrl, outputSchema, provider, apiKey, mode, maxSteps } = params const variablesObject = processVariables(params.variables) const startUrl = normalizeUrl(rawStartUrl) @@ -165,8 +167,10 @@ export const POST = withRouteHandler(async (request: NextRequest) => { return NextResponse.json({ error: 'Invalid Anthropic API key format' }, { status: 400 }) } - const modelName = - provider === 'anthropic' ? 'anthropic/claude-sonnet-4-5-20250929' : 'openai/gpt-5' + const modelName = provider === 'anthropic' ? 'anthropic/claude-sonnet-4-6' : 'openai/gpt-5' + + let sessionId: string | null = null + let liveViewUrl: string | null = null try { logger.info('Initializing Stagehand with Browserbase (v3)', { provider, modelName }) @@ -190,6 +194,35 @@ export const POST = withRouteHandler(async (request: NextRequest) => { await stagehand.init() logger.info('Stagehand initialized successfully') + sessionId = stagehand.browserbaseSessionID ?? null + if (sessionId) { + try { + const debugResponse = await fetch( + `https://api.browserbase.com/v1/sessions/${sessionId}/debug`, + { + method: 'GET', + headers: { + 'X-BB-API-Key': BROWSERBASE_API_KEY, + }, + } + ) + if (debugResponse.ok) { + const debugData = (await debugResponse.json()) as { + debuggerFullscreenUrl?: string + debuggerUrl?: string + } + liveViewUrl = debugData.debuggerFullscreenUrl ?? debugData.debuggerUrl ?? 
null + if (liveViewUrl) { + logger.info(`Browserbase live view URL: ${liveViewUrl}`) + } + } else { + logger.warn(`Failed to fetch Browserbase debug URL: ${debugResponse.statusText}`) + } + } catch (debugError) { + logger.warn('Error fetching Browserbase debug URL', { error: debugError }) + } + } + const page = stagehand.context.pages()[0] logger.info(`Navigating to ${startUrl}`) await page.goto(startUrl, { waitUntil: 'networkidle' }) @@ -223,13 +256,14 @@ export const POST = withRouteHandler(async (request: NextRequest) => { apiKey: apiKey, }, systemPrompt: agentInstructions, + mode, }) - logger.info('Executing agent task', { task: taskWithVariables }) + logger.info('Executing agent task', { task: taskWithVariables, mode, maxSteps }) const agentExecutionResult = await agent.execute({ instruction: taskWithVariables, - maxSteps: 20, + maxSteps, }) const agentResult = { @@ -293,6 +327,8 @@ export const POST = withRouteHandler(async (request: NextRequest) => { return NextResponse.json({ agentResult, structuredOutput, + liveViewUrl, + sessionId, }) } catch (error) { logger.error('Stagehand agent execution error', { @@ -327,6 +363,8 @@ export const POST = withRouteHandler(async (request: NextRequest) => { { error: errorMessage, details: errorDetails, + liveViewUrl, + sessionId, }, { status: 500 } ) diff --git a/apps/sim/app/api/tools/stagehand/extract/route.ts b/apps/sim/app/api/tools/stagehand/extract/route.ts index c39f5c78534..1ec99a182d9 100644 --- a/apps/sim/app/api/tools/stagehand/extract/route.ts +++ b/apps/sim/app/api/tools/stagehand/extract/route.ts @@ -17,8 +17,6 @@ const BROWSERBASE_PROJECT_ID = env.BROWSERBASE_PROJECT_ID const requestSchema = z.object({ instruction: z.string(), schema: z.record(z.any()), - useTextExtract: z.boolean().optional().default(false), - selector: z.string().nullable().optional(), provider: z.enum(['openai', 'anthropic']).optional().default('openai'), apiKey: z.string(), url: z.string().url(), @@ -51,7 +49,7 @@ export const POST = 
withRouteHandler(async (request: NextRequest) => { } const params = validationResult.data - const { url: rawUrl, instruction, selector, provider, apiKey, schema } = params + const { url: rawUrl, instruction, provider, apiKey, schema } = params const url = normalizeUrl(rawUrl) const urlValidation = await validateUrlWithDNS(url, 'url') if (!urlValidation.isValid) { @@ -101,8 +99,7 @@ export const POST = withRouteHandler(async (request: NextRequest) => { } try { - const modelName = - provider === 'anthropic' ? 'anthropic/claude-sonnet-4-5-20250929' : 'openai/gpt-5' + const modelName = provider === 'anthropic' ? 'anthropic/claude-sonnet-4-6' : 'openai/gpt-5' logger.info('Initializing Stagehand with Browserbase (v3)', { provider, modelName }) @@ -162,14 +159,11 @@ export const POST = withRouteHandler(async (request: NextRequest) => { logger.info('Calling stagehand.extract with options', { hasInstruction: !!instruction, hasSchema: !!zodSchema, - hasSelector: !!selector, }) let extractedData if (zodSchema) { - extractedData = await stagehand.extract(instruction, zodSchema, { - selector: selector || undefined, - }) + extractedData = await stagehand.extract(instruction, zodSchema) } else { extractedData = await stagehand.extract(instruction) } diff --git a/apps/sim/app/api/workflows/[id]/execute/route.ts b/apps/sim/app/api/workflows/[id]/execute/route.ts index b6e1aeab7b7..34b99b16744 100644 --- a/apps/sim/app/api/workflows/[id]/execute/route.ts +++ b/apps/sim/app/api/workflows/[id]/execute/route.ts @@ -61,7 +61,7 @@ import type { SerializableExecutionState, } from '@/executor/execution/types' import type { NormalizedBlockOutput, StreamingExecution } from '@/executor/types' -import { hasExecutionResult } from '@/executor/utils/errors' +import { getExecutionErrorStatus, hasExecutionResult } from '@/executor/utils/errors' import { Serializer } from '@/serializer' import { CORE_TRIGGER_TYPES, type CoreTriggerType } from '@/stores/logs/filters/types' @@ -821,6 +821,7 @@ async 
function handleExecutePost( reqLogger.error(`Non-SSE execution failed: ${errorMessage}`) const executionResult = hasExecutionResult(error) ? error.executionResult : undefined + const status = getExecutionErrorStatus(error) return NextResponse.json( { @@ -835,7 +836,7 @@ async function handleExecutePost( } : undefined, }, - { status: 500 } + { status } ) } finally { timeoutController.cleanup() diff --git a/apps/sim/app/api/workspaces/[id]/_preview/create-preview-route.ts b/apps/sim/app/api/workspaces/[id]/_preview/create-preview-route.ts index a369f9472a9..0e9c6a43e6c 100644 --- a/apps/sim/app/api/workspaces/[id]/_preview/create-preview-route.ts +++ b/apps/sim/app/api/workspaces/[id]/_preview/create-preview-route.ts @@ -3,7 +3,7 @@ import { toError } from '@sim/utils/errors' import { type NextRequest, NextResponse } from 'next/server' import { getSession } from '@/lib/auth' import { MAX_DOCUMENT_PREVIEW_CODE_BYTES } from '@/lib/execution/constants' -import { runSandboxTask } from '@/lib/execution/sandbox/run-task' +import { runSandboxTask, SandboxUserCodeError } from '@/lib/execution/sandbox/run-task' import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' import type { SandboxTaskId } from '@/sandbox-tasks/registry' @@ -83,6 +83,14 @@ export function createDocumentPreviewRoute(config: DocumentPreviewRouteConfig) { }) } catch (err) { const message = toError(err).message + if (err instanceof SandboxUserCodeError) { + logger.warn(`${config.label} preview user code failed`, { + error: message, + errorName: err.name, + workspaceId, + }) + return NextResponse.json({ error: message, errorName: err.name }, { status: 422 }) + } logger.error(`${config.label} preview generation failed`, { error: message, workspaceId }) return NextResponse.json({ error: message }, { status: 500 }) } diff --git a/apps/sim/app/api/workspaces/[id]/docx/preview/route.test.ts b/apps/sim/app/api/workspaces/[id]/docx/preview/route.test.ts index cffe9cf9aef..6f14fd0649a 100644 --- 
a/apps/sim/app/api/workspaces/[id]/docx/preview/route.test.ts +++ b/apps/sim/app/api/workspaces/[id]/docx/preview/route.test.ts @@ -6,9 +6,15 @@ import { NextRequest } from 'next/server' import { beforeEach, describe, expect, it, vi } from 'vitest' import { MAX_DOCUMENT_PREVIEW_CODE_BYTES } from '@/lib/execution/constants' -const { mockRunSandboxTask } = vi.hoisted(() => ({ - mockRunSandboxTask: vi.fn(), -})) +const { mockRunSandboxTask, SandboxUserCodeError } = vi.hoisted(() => { + class SandboxUserCodeError extends Error { + constructor(message: string, name: string) { + super(message) + this.name = name + } + } + return { mockRunSandboxTask: vi.fn(), SandboxUserCodeError } +}) const mockVerifyWorkspaceMembership = workflowsApiUtilsMockFns.mockVerifyWorkspaceMembership @@ -16,6 +22,7 @@ vi.mock('@/app/api/workflows/utils', () => workflowsApiUtilsMock) vi.mock('@/lib/execution/sandbox/run-task', () => ({ runSandboxTask: mockRunSandboxTask, + SandboxUserCodeError, })) import { POST } from '@/app/api/workspaces/[id]/docx/preview/route' @@ -189,4 +196,31 @@ describe('DOCX preview API route', () => { expect(response.status).toBe(500) await expect(response.json()).resolves.toEqual({ error: 'boom: sandbox failed' }) }) + + it('returns 422 when user code throws inside the sandbox', async () => { + mockRunSandboxTask.mockRejectedValue( + new SandboxUserCodeError('Invalid or unexpected token', 'SyntaxError') + ) + + const request = new NextRequest( + 'http://localhost:3000/api/workspaces/workspace-1/docx/preview', + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ code: 'const x = ' }), + } + ) + + const response = await POST(request, { + params: Promise.resolve({ id: 'workspace-1' }), + }) + + expect(response.status).toBe(422) + await expect(response.json()).resolves.toEqual({ + error: 'Invalid or unexpected token', + errorName: 'SyntaxError', + }) + }) }) diff --git 
a/apps/sim/app/api/workspaces/[id]/files/[fileId]/compiled-check/route.ts b/apps/sim/app/api/workspaces/[id]/files/[fileId]/compiled-check/route.ts new file mode 100644 index 00000000000..cbc7c7514cd --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/files/[fileId]/compiled-check/route.ts @@ -0,0 +1,90 @@ +import { createLogger } from '@sim/logger' +import { toError } from '@sim/utils/errors' +import { type NextRequest, NextResponse } from 'next/server' +import { getSession } from '@/lib/auth' +import { withRouteHandler } from '@/lib/core/utils/with-route-handler' +import { BINARY_DOC_TASKS, MAX_DOCUMENT_PREVIEW_CODE_BYTES } from '@/lib/execution/constants' +import { runSandboxTask, SandboxUserCodeError } from '@/lib/execution/sandbox/run-task' +import { downloadWorkspaceFile, getWorkspaceFile } from '@/lib/uploads/contexts/workspace' +import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' + +export const dynamic = 'force-dynamic' +export const runtime = 'nodejs' + +const logger = createLogger('WorkspaceFileCompiledCheckAPI') + +/** + * GET /api/workspaces/[id]/files/[fileId]/compiled-check + * + * Compiles the saved JavaScript source of a .docx / .pptx / .pdf file and + * returns whether it succeeds. Used by the file agent to self-verify generated + * code before finalising an edit. 
+ * + * Returns: + * 200 { ok: true } + * 200 { ok: false, error: string, errorName: string } — user code error + * 4xx on auth / missing file / unsupported extension + * 500 on system (sandbox infra) failure + */ +export const GET = withRouteHandler( + async (request: NextRequest, { params }: { params: Promise<{ id: string; fileId: string }> }) => { + const { id: workspaceId, fileId } = await params + + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const membership = await verifyWorkspaceMembership(session.user.id, workspaceId) + if (!membership) { + return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) + } + + const fileRecord = await getWorkspaceFile(workspaceId, fileId) + if (!fileRecord) { + return NextResponse.json({ error: 'File not found' }, { status: 404 }) + } + + const ext = fileRecord.name.split('.').pop()?.toLowerCase() ?? '' + const taskId = BINARY_DOC_TASKS[ext] + if (!taskId) { + return NextResponse.json( + { error: `Compiled check only supports .docx, .pptx, and .pdf files` }, + { status: 422 } + ) + } + + let buffer: Buffer + try { + buffer = await downloadWorkspaceFile(fileRecord) + } catch (err) { + logger.error('Failed to download file for compiled check', { + fileId, + error: toError(err).message, + }) + return NextResponse.json({ error: 'Failed to read file' }, { status: 500 }) + } + + const code = buffer.toString('utf-8') + + if (Buffer.byteLength(code, 'utf-8') > MAX_DOCUMENT_PREVIEW_CODE_BYTES) { + return NextResponse.json({ error: 'File source exceeds maximum size' }, { status: 413 }) + } + + try { + await runSandboxTask(taskId, { code, workspaceId }, { ownerKey: `user:${session.user.id}` }) + return NextResponse.json({ ok: true }) + } catch (err) { + if (err instanceof SandboxUserCodeError) { + logger.info('Compiled check failed with user code error', { + fileId, + taskId, + error: toError(err).message, + errorName: 
err.name, + }) + return NextResponse.json({ ok: false, error: toError(err).message, errorName: err.name }) + } + throw err + } + } +) diff --git a/apps/sim/app/api/workspaces/[id]/files/[fileId]/content/route.ts b/apps/sim/app/api/workspaces/[id]/files/[fileId]/content/route.ts index 606978a9279..155f426607b 100644 --- a/apps/sim/app/api/workspaces/[id]/files/[fileId]/content/route.ts +++ b/apps/sim/app/api/workspaces/[id]/files/[fileId]/content/route.ts @@ -1,8 +1,8 @@ import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit' import { createLogger } from '@sim/logger' +import { toError } from '@sim/utils/errors' import { type NextRequest, NextResponse } from 'next/server' import { getSession } from '@/lib/auth' -import { generateRequestId } from '@/lib/core/utils/request' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { updateWorkspaceFileContent } from '@/lib/uploads/contexts/workspace' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' @@ -17,7 +17,6 @@ const logger = createLogger('WorkspaceFileContentAPI') */ export const PUT = withRouteHandler( async (request: NextRequest, { params }: { params: Promise<{ id: string; fileId: string }> }) => { - const requestId = generateRequestId() const { id: workspaceId, fileId } = await params try { @@ -32,20 +31,19 @@ export const PUT = withRouteHandler( workspaceId ) if (userPermission !== 'admin' && userPermission !== 'write') { - logger.warn( - `[${requestId}] User ${session.user.id} lacks write permission for workspace ${workspaceId}` - ) + logger.warn(`User ${session.user.id} lacks write permission for workspace ${workspaceId}`) return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) } const body = await request.json() - const { content } = body as { content: string } + const { content, encoding } = body as { content: string; encoding?: 'base64' | 'utf-8' } if (typeof content !== 'string') { return NextResponse.json({ 
error: 'Content must be a string' }, { status: 400 }) } - const buffer = Buffer.from(content, 'utf-8') + const buffer = + encoding === 'base64' ? Buffer.from(content, 'base64') : Buffer.from(content, 'utf-8') const maxFileSizeBytes = 50 * 1024 * 1024 if (buffer.length > maxFileSizeBytes) { @@ -62,7 +60,7 @@ export const PUT = withRouteHandler( buffer ) - logger.info(`[${requestId}] Updated content for workspace file: ${updatedFile.name}`) + logger.info(`Updated content for workspace file: ${updatedFile.name}`) recordAudit({ workspaceId, @@ -83,15 +81,15 @@ export const PUT = withRouteHandler( file: updatedFile, }) } catch (error) { - const errorMessage = error instanceof Error ? error.message : 'Failed to update file content' + const errorMessage = toError(error).message || 'Failed to update file content' const isNotFound = errorMessage.includes('File not found') const isQuotaExceeded = errorMessage.includes('Storage limit exceeded') const status = isNotFound ? 404 : isQuotaExceeded ? 402 : 500 if (status === 500) { - logger.error(`[${requestId}] Error updating file content:`, error) + logger.error('Error updating file content:', error) } else { - logger.warn(`[${requestId}] ${errorMessage}`) + logger.warn(errorMessage) } return NextResponse.json({ success: false, error: errorMessage }, { status }) diff --git a/apps/sim/app/api/workspaces/[id]/files/[fileId]/style/route.ts b/apps/sim/app/api/workspaces/[id]/files/[fileId]/style/route.ts new file mode 100644 index 00000000000..a07706da0f1 --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/files/[fileId]/style/route.ts @@ -0,0 +1,81 @@ +import { createLogger } from '@sim/logger' +import { toError } from '@sim/utils/errors' +import { type NextRequest, NextResponse } from 'next/server' +import { getSession } from '@/lib/auth' +import { extractDocumentStyle } from '@/lib/copilot/vfs/document-style' +import { withRouteHandler } from '@/lib/core/utils/with-route-handler' +import { downloadWorkspaceFile, getWorkspaceFile 
} from '@/lib/uploads/contexts/workspace' +import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' + +export const dynamic = 'force-dynamic' +export const runtime = 'nodejs' + +const logger = createLogger('WorkspaceFileStyleAPI') + +/** + * GET /api/workspaces/[id]/files/[fileId]/style + * Extract a compact JSON style summary from an uploaded .docx or .pptx file. + * Uses OOXML theme XML to return theme colors, font pair, and named styles. + * Only works on binary OOXML files (ZIP format) — not on JS source files. + */ +export const GET = withRouteHandler( + async (request: NextRequest, { params }: { params: Promise<{ id: string; fileId: string }> }) => { + const { id: workspaceId, fileId } = await params + + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const membership = await verifyWorkspaceMembership(session.user.id, workspaceId) + if (!membership) { + return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) + } + + const fileRecord = await getWorkspaceFile(workspaceId, fileId) + if (!fileRecord) { + return NextResponse.json({ error: 'File not found' }, { status: 404 }) + } + + const rawExt = fileRecord.name.split('.').pop()?.toLowerCase() + if (rawExt !== 'docx' && rawExt !== 'pptx') { + return NextResponse.json( + { error: 'Style extraction only supports .docx and .pptx files' }, + { status: 422 } + ) + } + const ext: 'docx' | 'pptx' = rawExt + + let buffer: Buffer + try { + buffer = await downloadWorkspaceFile(fileRecord) + } catch (err) { + logger.error('Failed to download file for style extraction', { + fileId, + error: toError(err).message, + }) + return NextResponse.json({ error: 'Failed to read file' }, { status: 500 }) + } + + const summary = await extractDocumentStyle(buffer, ext) + if (!summary) { + return NextResponse.json( + { + error: + 'File is not a compiled binary document — style extraction requires an uploaded 
or compiled .docx/.pptx file', + }, + { status: 422 } + ) + } + + logger.info('Extracted style summary via API', { + fileId, + format: ext, + themeName: summary.theme.name, + }) + + return NextResponse.json(summary, { + headers: { 'Cache-Control': 'private, max-age=300' }, + }) + } +) diff --git a/apps/sim/app/api/workspaces/[id]/pdf/preview/route.test.ts b/apps/sim/app/api/workspaces/[id]/pdf/preview/route.test.ts index cf5bd49e454..2dd189f89c7 100644 --- a/apps/sim/app/api/workspaces/[id]/pdf/preview/route.test.ts +++ b/apps/sim/app/api/workspaces/[id]/pdf/preview/route.test.ts @@ -6,9 +6,15 @@ import { NextRequest } from 'next/server' import { beforeEach, describe, expect, it, vi } from 'vitest' import { MAX_DOCUMENT_PREVIEW_CODE_BYTES } from '@/lib/execution/constants' -const { mockRunSandboxTask } = vi.hoisted(() => ({ - mockRunSandboxTask: vi.fn(), -})) +const { mockRunSandboxTask, SandboxUserCodeError } = vi.hoisted(() => { + class SandboxUserCodeError extends Error { + constructor(message: string, name: string) { + super(message) + this.name = name + } + } + return { mockRunSandboxTask: vi.fn(), SandboxUserCodeError } +}) const mockVerifyWorkspaceMembership = workflowsApiUtilsMockFns.mockVerifyWorkspaceMembership @@ -16,6 +22,7 @@ vi.mock('@/app/api/workflows/utils', () => workflowsApiUtilsMock) vi.mock('@/lib/execution/sandbox/run-task', () => ({ runSandboxTask: mockRunSandboxTask, + SandboxUserCodeError, })) import { POST } from '@/app/api/workspaces/[id]/pdf/preview/route' @@ -187,4 +194,31 @@ describe('PDF preview API route', () => { expect(response.status).toBe(500) await expect(response.json()).resolves.toEqual({ error: 'boom: sandbox failed' }) }) + + it('returns 422 when user code throws inside the sandbox', async () => { + mockRunSandboxTask.mockRejectedValue( + new SandboxUserCodeError('Invalid or unexpected token', 'SyntaxError') + ) + + const request = new NextRequest( + 'http://localhost:3000/api/workspaces/workspace-1/pdf/preview', + { + 
method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ code: 'const x = ' }), + } + ) + + const response = await POST(request, { + params: Promise.resolve({ id: 'workspace-1' }), + }) + + expect(response.status).toBe(422) + await expect(response.json()).resolves.toEqual({ + error: 'Invalid or unexpected token', + errorName: 'SyntaxError', + }) + }) }) diff --git a/apps/sim/app/api/workspaces/[id]/pptx/preview/route.test.ts b/apps/sim/app/api/workspaces/[id]/pptx/preview/route.test.ts index 08a8e11f889..900dd41f639 100644 --- a/apps/sim/app/api/workspaces/[id]/pptx/preview/route.test.ts +++ b/apps/sim/app/api/workspaces/[id]/pptx/preview/route.test.ts @@ -6,9 +6,15 @@ import { NextRequest } from 'next/server' import { beforeEach, describe, expect, it, vi } from 'vitest' import { MAX_DOCUMENT_PREVIEW_CODE_BYTES } from '@/lib/execution/constants' -const { mockRunSandboxTask } = vi.hoisted(() => ({ - mockRunSandboxTask: vi.fn(), -})) +const { mockRunSandboxTask, SandboxUserCodeError } = vi.hoisted(() => { + class SandboxUserCodeError extends Error { + constructor(message: string, name: string) { + super(message) + this.name = name + } + } + return { mockRunSandboxTask: vi.fn(), SandboxUserCodeError } +}) const mockVerifyWorkspaceMembership = workflowsApiUtilsMockFns.mockVerifyWorkspaceMembership @@ -16,6 +22,7 @@ vi.mock('@/app/api/workflows/utils', () => workflowsApiUtilsMock) vi.mock('@/lib/execution/sandbox/run-task', () => ({ runSandboxTask: mockRunSandboxTask, + SandboxUserCodeError, })) import { POST } from '@/app/api/workspaces/[id]/pptx/preview/route' @@ -189,4 +196,31 @@ describe('PPTX preview API route', () => { expect(response.status).toBe(500) await expect(response.json()).resolves.toEqual({ error: 'boom: sandbox failed' }) }) + + it('returns 422 when user code throws inside the sandbox', async () => { + mockRunSandboxTask.mockRejectedValue( + new SandboxUserCodeError('Invalid or unexpected token', 'SyntaxError') + 
) + + const request = new NextRequest( + 'http://localhost:3000/api/workspaces/workspace-1/pptx/preview', + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ code: 'const x = ' }), + } + ) + + const response = await POST(request, { + params: Promise.resolve({ id: 'workspace-1' }), + }) + + expect(response.status).toBe(422) + await expect(response.json()).resolves.toEqual({ + error: 'Invalid or unexpected token', + errorName: 'SyntaxError', + }) + }) }) diff --git a/apps/sim/app/api/workspaces/invitations/batch/route.ts b/apps/sim/app/api/workspaces/invitations/batch/route.ts new file mode 100644 index 00000000000..1f48746dd5a --- /dev/null +++ b/apps/sim/app/api/workspaces/invitations/batch/route.ts @@ -0,0 +1,129 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { withRouteHandler } from '@/lib/core/utils/with-route-handler' +import { normalizeEmail } from '@/lib/invitations/core' +import { + createWorkspaceInvitation, + prepareWorkspaceInvitationContext, + WorkspaceInvitationError, + type WorkspaceInvitationResult, +} from '@/lib/invitations/workspace-invitations' +import { InvitationsNotAllowedError } from '@/ee/access-control/utils/permission-check' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('WorkspaceInvitationBatchAPI') + +interface BatchInvitationFailure { + email: string + error: string +} + +const batchInvitationSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + invitations: z + .array( + z.object({ + email: z.string().trim().min(1, 'Invitation email is required'), + permission: z.string().optional(), + }) + ) + .min(1, 'At least one invitation is required'), +}) + +type BatchInvitationRequest = z.infer + +function batchErrorResponse(error: unknown) { + if (error instanceof WorkspaceInvitationError) { + return 
NextResponse.json( + { + error: error.message, + ...(error.email ? { email: error.email } : {}), + ...(error.upgradeRequired !== undefined ? { upgradeRequired: error.upgradeRequired } : {}), + }, + { status: error.status } + ) + } + + if (error instanceof InvitationsNotAllowedError) { + return NextResponse.json({ error: error.message }, { status: 403 }) + } + + logger.error('Error creating workspace invitation batch:', error) + return NextResponse.json({ error: 'Failed to create invitation batch' }, { status: 500 }) +} + +export const POST = withRouteHandler(async (req: NextRequest) => { + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + try { + const parsedBody = batchInvitationSchema.safeParse(await req.json().catch(() => null)) + if (!parsedBody.success) { + return NextResponse.json( + { error: parsedBody.error.errors[0]?.message ?? 'Invalid invitation batch payload' }, + { status: 400 } + ) + } + const body: BatchInvitationRequest = parsedBody.data + + const context = await prepareWorkspaceInvitationContext({ + workspaceId: body.workspaceId, + inviterId: session.user.id, + inviterName: session.user.name || session.user.email || 'A user', + inviterEmail: session.user.email, + }) + + const successful: string[] = [] + const failed: BatchInvitationFailure[] = [] + const invitations: WorkspaceInvitationResult[] = [] + const seenEmails = new Set() + + for (const item of body.invitations) { + const normalizedEmail = normalizeEmail(item.email) + if (seenEmails.has(normalizedEmail)) { + failed.push({ + email: normalizedEmail, + error: `${normalizedEmail} appears more than once in this invitation batch`, + }) + continue + } + seenEmails.add(normalizedEmail) + + try { + const invitation = await createWorkspaceInvitation({ + context, + email: item.email, + permission: item.permission, + request: req, + }) + successful.push(invitation.email) + invitations.push(invitation) + } catch 
(error) { + if (error instanceof WorkspaceInvitationError) { + failed.push({ email: error.email ?? normalizedEmail, error: error.message }) + continue + } + + logger.error('Unexpected workspace invitation batch item failure:', { + email: normalizedEmail, + error, + }) + throw error + } + } + + return NextResponse.json({ + success: failed.length === 0, + successful, + failed, + invitations, + }) + } catch (error) { + return batchErrorResponse(error) + } +}) diff --git a/apps/sim/app/api/workspaces/invitations/route.test.ts b/apps/sim/app/api/workspaces/invitations/route.test.ts index e15b4236061..979fe7523bc 100644 --- a/apps/sim/app/api/workspaces/invitations/route.test.ts +++ b/apps/sim/app/api/workspaces/invitations/route.test.ts @@ -108,9 +108,9 @@ const mockGetSession = authMockFns.mockGetSession const mockGetWorkspaceWithOwner = permissionsMockFns.mockGetWorkspaceWithOwner import { UPGRADE_TO_INVITE_REASON } from '@/lib/workspaces/policy-constants' -import { POST } from '@/app/api/workspaces/invitations/route' +import { POST } from '@/app/api/workspaces/invitations/batch/route' -describe('POST /api/workspaces/invitations', () => { +describe('POST /api/workspaces/invitations/batch', () => { beforeEach(() => { vi.clearAllMocks() mockDbResults.value = [] @@ -169,8 +169,7 @@ describe('POST /api/workspaces/invitations', () => { const request = createMockRequest('POST', { workspaceId: 'workspace-1', - email: 'new@example.com', - permission: 'read', + invitations: [{ email: 'new@example.com', permission: 'read' }], }) const response = await POST(request) @@ -201,8 +200,7 @@ describe('POST /api/workspaces/invitations', () => { const request = createMockRequest('POST', { workspaceId: 'workspace-1', - email: 'new@example.com', - permission: 'read', + invitations: [{ email: 'new@example.com', permission: 'read' }], }) const response = await POST(request) @@ -213,7 +211,7 @@ describe('POST /api/workspaces/invitations', () => { 
expect(mockCreatePendingInvitation).not.toHaveBeenCalled() }) - it('rejects org-owned invites when the organization has no available seats', async () => { + it('reports org-owned invites as failed when the organization has no available seats', async () => { mockGetWorkspaceWithOwner.mockResolvedValueOnce({ id: 'workspace-1', name: 'Org Workspace', @@ -240,20 +238,25 @@ describe('POST /api/workspaces/invitations', () => { const request = createMockRequest('POST', { workspaceId: 'workspace-1', - email: 'new@example.com', - permission: 'read', + invitations: [{ email: 'new@example.com', permission: 'read' }], }) const response = await POST(request) const data = await response.json() - expect(response.status).toBe(400) - expect(data.error).toContain('No available seats') + expect(response.status).toBe(200) + expect(data.success).toBe(false) + expect(data.failed).toEqual([ + { + email: 'new@example.com', + error: 'No available seats. Currently using 5 of 5 seats.', + }, + ]) expect(mockValidateSeatAvailability).toHaveBeenCalledWith('org-1', 1) expect(mockCreatePendingInvitation).not.toHaveBeenCalled() }) - it('rejects org-owned invites for users already in another organization', async () => { + it('creates an external workspace invitation for users already in another organization', async () => { mockGetWorkspaceWithOwner.mockResolvedValueOnce({ id: 'workspace-1', name: 'Org Workspace', @@ -281,16 +284,25 @@ describe('POST /api/workspaces/invitations', () => { const request = createMockRequest('POST', { workspaceId: 'workspace-1', - email: 'new@example.com', - permission: 'read', + invitations: [{ email: 'new@example.com', permission: 'read' }], }) const response = await POST(request) const data = await response.json() - expect(response.status).toBe(409) - expect(data.error).toContain('already a member of another organization') - expect(mockCreatePendingInvitation).not.toHaveBeenCalled() + expect(response.status).toBe(200) + expect(data.success).toBe(true) + 
expect(data.invitations[0].membershipIntent).toBe('external') + expect(mockValidateSeatAvailability).not.toHaveBeenCalled() + expect(mockCreatePendingInvitation).toHaveBeenCalledWith( + expect.objectContaining({ + kind: 'workspace', + email: 'new@example.com', + organizationId: 'org-1', + membershipIntent: 'external', + grants: [{ workspaceId: 'workspace-1', permission: 'read' }], + }) + ) }) it('creates a unified workspace invitation for a grandfathered workspace', async () => { @@ -306,8 +318,7 @@ describe('POST /api/workspaces/invitations', () => { const request = createMockRequest('POST', { workspaceId: 'workspace-1', - email: 'new@example.com', - permission: 'write', + invitations: [{ email: 'new@example.com', permission: 'write' }], }) const response = await POST(request) @@ -327,6 +338,40 @@ describe('POST /api/workspaces/invitations', () => { expect(mockValidateSeatAvailability).not.toHaveBeenCalled() }) + it('creates multiple workspace invitations in one batch request', async () => { + mockDbResults.value = [[{ permissionType: 'admin' }], [], []] + mockCreatePendingInvitation + .mockResolvedValueOnce({ + invitationId: 'inv-1', + token: 'tok-1', + expiresAt: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000), + }) + .mockResolvedValueOnce({ + invitationId: 'inv-2', + token: 'tok-2', + expiresAt: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000), + }) + + const request = createMockRequest('POST', { + workspaceId: 'workspace-1', + invitations: [ + { email: 'first@example.com', permission: 'read' }, + { email: 'second@example.com', permission: 'write' }, + ], + }) + + const response = await POST(request) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.success).toBe(true) + expect(data.successful).toEqual(['first@example.com', 'second@example.com']) + expect(data.failed).toEqual([]) + expect(data.invitations).toHaveLength(2) + expect(mockCreatePendingInvitation).toHaveBeenCalledTimes(2) + 
expect(mockSendInvitationEmail).toHaveBeenCalledTimes(2) + }) + it('rolls back the unified invitation when email delivery fails', async () => { mockGetWorkspaceWithOwner.mockResolvedValueOnce({ id: 'workspace-1', @@ -344,13 +389,18 @@ describe('POST /api/workspaces/invitations', () => { const request = createMockRequest('POST', { workspaceId: 'workspace-1', - email: 'new@example.com', - permission: 'read', + invitations: [{ email: 'new@example.com', permission: 'read' }], }) const response = await POST(request) - expect(response.status).toBe(502) + expect(response.status).toBe(200) + await expect(response.json()).resolves.toEqual( + expect.objectContaining({ + success: false, + failed: [{ email: 'new@example.com', error: 'mailer unavailable' }], + }) + ) expect(mockCancelPendingInvitation).toHaveBeenCalledWith('inv-1') }) }) diff --git a/apps/sim/app/api/workspaces/invitations/route.ts b/apps/sim/app/api/workspaces/invitations/route.ts index a994d6daa48..378b169ad66 100644 --- a/apps/sim/app/api/workspaces/invitations/route.ts +++ b/apps/sim/app/api/workspaces/invitations/route.ts @@ -1,35 +1,16 @@ -import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit' import { db } from '@sim/db' -import { permissions, type permissionTypeEnum, user, workspace } from '@sim/db/schema' +import { permissions, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq, isNull, sql } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { getSession } from '@/lib/auth' -import { getUserOrganization } from '@/lib/billing/organizations/membership' -import { validateSeatAvailability } from '@/lib/billing/validation/seat-management' -import { PlatformEvents } from '@/lib/core/telemetry' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' -import { listInvitationsForWorkspaces, normalizeEmail } from '@/lib/invitations/core' -import { - 
cancelPendingInvitation, - createPendingInvitation, - findPendingGrantForWorkspaceEmail, - sendInvitationEmail, -} from '@/lib/invitations/send' -import { captureServerEvent } from '@/lib/posthog/server' -import { getWorkspaceWithOwner } from '@/lib/workspaces/permissions/utils' -import { getWorkspaceInvitePolicy } from '@/lib/workspaces/policy' -import { - InvitationsNotAllowedError, - validateInvitationsAllowed, -} from '@/ee/access-control/utils/permission-check' +import { listInvitationsForWorkspaces } from '@/lib/invitations/core' export const dynamic = 'force-dynamic' const logger = createLogger('WorkspaceInvitationsAPI') -type PermissionType = (typeof permissionTypeEnum.enumValues)[number] - export const GET = withRouteHandler(async (req: NextRequest) => { const session = await getSession() if (!session?.user?.id) { @@ -61,241 +42,3 @@ export const GET = withRouteHandler(async (req: NextRequest) => { return NextResponse.json({ error: 'Failed to fetch invitations' }, { status: 500 }) } }) - -export const POST = withRouteHandler(async (req: NextRequest) => { - const session = await getSession() - if (!session?.user?.id) { - return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) - } - - try { - const { workspaceId, email, permission = 'read' } = await req.json() - - if (!workspaceId || !email) { - return NextResponse.json({ error: 'Workspace ID and email are required' }, { status: 400 }) - } - - await validateInvitationsAllowed(session.user.id, workspaceId) - - const validPermissions: PermissionType[] = ['admin', 'write', 'read'] - if (!validPermissions.includes(permission)) { - return NextResponse.json( - { error: `Invalid permission: must be one of ${validPermissions.join(', ')}` }, - { status: 400 } - ) - } - - const normalizedEmail = normalizeEmail(email) - - const userPermission = await db - .select() - .from(permissions) - .where( - and( - eq(permissions.entityId, workspaceId), - eq(permissions.entityType, 'workspace'), - 
eq(permissions.userId, session.user.id), - eq(permissions.permissionType, 'admin') - ) - ) - .then((rows) => rows[0]) - - if (!userPermission) { - return NextResponse.json( - { error: 'You need admin permissions to invite users' }, - { status: 403 } - ) - } - - const workspaceDetails = await getWorkspaceWithOwner(workspaceId) - if (!workspaceDetails) { - return NextResponse.json({ error: 'Workspace not found' }, { status: 404 }) - } - - const invitePolicy = await getWorkspaceInvitePolicy(workspaceDetails) - if (!invitePolicy.allowed) { - return NextResponse.json( - { - error: invitePolicy.reason ?? 'Invites are disabled for this workspace.', - upgradeRequired: invitePolicy.upgradeRequired, - }, - { status: 403 } - ) - } - - const existingUser = await db - .select() - .from(user) - .where(sql`lower(${user.email}) = ${normalizedEmail}`) - .then((rows) => rows[0]) - - if (existingUser) { - const existingPermission = await db - .select() - .from(permissions) - .where( - and( - eq(permissions.entityId, workspaceId), - eq(permissions.entityType, 'workspace'), - eq(permissions.userId, existingUser.id) - ) - ) - .then((rows) => rows[0]) - - if (existingPermission) { - return NextResponse.json( - { - error: `${normalizedEmail} already has access to this workspace`, - email: normalizedEmail, - }, - { status: 400 } - ) - } - - if (invitePolicy.requiresSeat && invitePolicy.organizationId) { - const existingMembership = await getUserOrganization(existingUser.id) - if ( - existingMembership && - existingMembership.organizationId !== invitePolicy.organizationId - ) { - return NextResponse.json( - { - error: - 'This user is already a member of another organization. 
They must leave it before joining this workspace.', - email: normalizedEmail, - }, - { status: 409 } - ) - } - - if (!existingMembership) { - const seatValidation = await validateSeatAvailability(invitePolicy.organizationId, 1) - if (!seatValidation.canInvite) { - return NextResponse.json( - { - error: seatValidation.reason || 'No available seats for this organization.', - email: normalizedEmail, - }, - { status: 400 } - ) - } - } - } - } else if (invitePolicy.requiresSeat && invitePolicy.organizationId) { - const seatValidation = await validateSeatAvailability(invitePolicy.organizationId, 1) - if (!seatValidation.canInvite) { - return NextResponse.json( - { - error: seatValidation.reason || 'No available seats for this organization.', - email: normalizedEmail, - }, - { status: 400 } - ) - } - } - - const existingInvitation = await findPendingGrantForWorkspaceEmail({ - workspaceId, - email: normalizedEmail, - }) - if (existingInvitation) { - return NextResponse.json( - { - error: `${normalizedEmail} has already been invited to this workspace`, - email: normalizedEmail, - }, - { status: 400 } - ) - } - - const { invitationId, token } = await createPendingInvitation({ - kind: 'workspace', - email: normalizedEmail, - inviterId: session.user.id, - organizationId: workspaceDetails.organizationId, - role: 'member', - grants: [ - { - workspaceId, - permission, - }, - ], - }) - - try { - PlatformEvents.workspaceMemberInvited({ - workspaceId, - invitedBy: session.user.id, - inviteeEmail: normalizedEmail, - role: permission, - }) - } catch { - // telemetry must not fail the operation - } - - captureServerEvent( - session.user.id, - 'workspace_member_invited', - { workspace_id: workspaceId, invitee_role: permission }, - { - groups: { workspace: workspaceId }, - setOnce: { first_invitation_sent_at: new Date().toISOString() }, - } - ) - - const emailResult = await sendInvitationEmail({ - invitationId, - token, - kind: 'workspace', - email: normalizedEmail, - inviterName: 
session.user.name || session.user.email || 'A user', - organizationId: workspaceDetails.organizationId, - organizationRole: 'member', - grants: [{ workspaceId, permission }], - }) - - if (!emailResult.success) { - await cancelPendingInvitation(invitationId) - return NextResponse.json( - { error: emailResult.error || 'Failed to send invitation email' }, - { status: 502 } - ) - } - - recordAudit({ - workspaceId, - actorId: session.user.id, - actorName: session.user.name, - actorEmail: session.user.email, - action: AuditAction.MEMBER_INVITED, - resourceType: AuditResourceType.WORKSPACE, - resourceId: workspaceId, - resourceName: normalizedEmail, - description: `Invited ${normalizedEmail} as ${permission}`, - metadata: { - targetEmail: normalizedEmail, - targetRole: permission, - workspaceName: workspaceDetails.name, - invitationId, - }, - request: req, - }) - - return NextResponse.json({ - success: true, - invitation: { - id: invitationId, - workspaceId, - email: normalizedEmail, - permission, - expiresAt: undefined, - }, - }) - } catch (error) { - if (error instanceof InvitationsNotAllowedError) { - return NextResponse.json({ error: error.message }, { status: 403 }) - } - logger.error('Error creating workspace invitation:', error) - return NextResponse.json({ error: 'Failed to create invitation' }, { status: 500 }) - } -}) diff --git a/apps/sim/app/api/workspaces/members/[id]/route.ts b/apps/sim/app/api/workspaces/members/[id]/route.ts index 43add66c447..b178c4c2905 100644 --- a/apps/sim/app/api/workspaces/members/[id]/route.ts +++ b/apps/sim/app/api/workspaces/members/[id]/route.ts @@ -1,13 +1,14 @@ import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit' import { db } from '@sim/db' -import { permissions, workspace } from '@sim/db/schema' +import { permissionGroupMember, permissions, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' +import { generateId } from '@sim/utils/id' import { and, eq } from 'drizzle-orm' import { 
type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { getSession } from '@/lib/auth' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' -import { revokeWorkspaceCredentialMemberships } from '@/lib/credentials/access' +import { revokeWorkspaceCredentialMembershipsTx } from '@/lib/credentials/access' import { captureServerEvent } from '@/lib/posthog/server' import { hasWorkspaceAdminAccess } from '@/lib/workspaces/permissions/utils' @@ -32,7 +33,10 @@ export const DELETE = withRouteHandler( const { workspaceId } = body const workspaceRow = await db - .select({ billedAccountUserId: workspace.billedAccountUserId }) + .select({ + ownerId: workspace.ownerId, + billedAccountUserId: workspace.billedAccountUserId, + }) .from(workspace) .where(eq(workspace.id, workspaceId)) .limit(1) @@ -61,7 +65,10 @@ export const DELETE = withRouteHandler( ) .then((rows) => rows[0]) - if (!userPermission) { + const isRemovingWorkspaceOwner = workspaceRow[0].ownerId === userId + const isOwnerOnlyRemoval = isRemovingWorkspaceOwner && !userPermission + + if (!userPermission && !isOwnerOnlyRemoval) { return NextResponse.json({ error: 'User not found in workspace' }, { status: 404 }) } @@ -73,8 +80,19 @@ export const DELETE = withRouteHandler( return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) } + if ( + isRemovingWorkspaceOwner && + !isSelf && + session.user.id !== workspaceRow[0].billedAccountUserId + ) { + return NextResponse.json( + { error: 'Only the workspace owner or billing account can remove the workspace owner' }, + { status: 403 } + ) + } + // Prevent removing yourself if you're the last admin - if (isSelf && userPermission.permissionType === 'admin') { + if (isSelf && userPermission?.permissionType === 'admin' && !isRemovingWorkspaceOwner) { const otherAdmins = await db .select() .from(permissions) @@ -95,18 +113,78 @@ export const DELETE = withRouteHandler( } } - // Delete the user's permissions for 
this workspace - await db - .delete(permissions) - .where( - and( - eq(permissions.userId, userId), - eq(permissions.entityType, 'workspace'), - eq(permissions.entityId, workspaceId) + const ownershipTransferred = await db.transaction(async (tx) => { + let didTransferOwnership = false + + if (isRemovingWorkspaceOwner) { + /** + * Invariant: the billed account is the org owner for org workspaces, + * the owner for personal workspaces, and a workspace admin for + * grandfathered shared workspaces. + */ + const newOwnerId = workspaceRow[0].billedAccountUserId + + await tx + .update(workspace) + .set({ ownerId: newOwnerId, updatedAt: new Date() }) + .where(eq(workspace.id, workspaceId)) + + const [existingNewOwnerPermission] = await tx + .select({ id: permissions.id }) + .from(permissions) + .where( + and( + eq(permissions.userId, newOwnerId), + eq(permissions.entityType, 'workspace'), + eq(permissions.entityId, workspaceId) + ) + ) + .limit(1) + + if (existingNewOwnerPermission) { + await tx + .update(permissions) + .set({ permissionType: 'admin', updatedAt: new Date() }) + .where(eq(permissions.id, existingNewOwnerPermission.id)) + } else { + const now = new Date() + await tx.insert(permissions).values({ + id: generateId(), + userId: newOwnerId, + entityType: 'workspace', + entityId: workspaceId, + permissionType: 'admin', + createdAt: now, + updatedAt: now, + }) + } + + didTransferOwnership = true + } + + await tx + .delete(permissions) + .where( + and( + eq(permissions.userId, userId), + eq(permissions.entityType, 'workspace'), + eq(permissions.entityId, workspaceId) + ) ) - ) - await revokeWorkspaceCredentialMemberships(workspaceId, userId) + await revokeWorkspaceCredentialMembershipsTx(tx, workspaceId, userId) + + await tx + .delete(permissionGroupMember) + .where( + and( + eq(permissionGroupMember.userId, userId), + eq(permissionGroupMember.workspaceId, workspaceId) + ) + ) + + return didTransferOwnership + }) captureServerEvent( session.user.id, @@ -126,8 
+204,9 @@ export const DELETE = withRouteHandler( description: isSelf ? 'Left the workspace' : `Removed member ${userId} from the workspace`, metadata: { removedUserId: userId, - removedUserRole: userPermission.permissionType, + removedUserRole: userPermission?.permissionType ?? 'owner', selfRemoval: isSelf, + ownershipTransferred, }, request: req, }) diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/resource.tsx b/apps/sim/app/workspace/[workspaceId]/components/resource/resource.tsx index 32792f1f367..9ba68c197d5 100644 --- a/apps/sim/app/workspace/[workspaceId]/components/resource/resource.tsx +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/resource.tsx @@ -313,7 +313,10 @@ export const ResourceTable = memo(function ResourceTable({ -
+
@@ -562,7 +565,7 @@ const ResourceColGroup = memo(function ResourceColGroup({ key={col.id} style={ colIdx === 0 - ? { minWidth: 200 * (col.widthMultiplier ?? 1) } + ? { width: 400 * (col.widthMultiplier ?? 1) } : { width: 160 * (col.widthMultiplier ?? 1) } } /> diff --git a/apps/sim/app/workspace/[workspaceId]/files/[fileId]/page.tsx b/apps/sim/app/workspace/[workspaceId]/files/[fileId]/page.tsx index 81398b7f17b..2bc2bef02d6 100644 --- a/apps/sim/app/workspace/[workspaceId]/files/[fileId]/page.tsx +++ b/apps/sim/app/workspace/[workspaceId]/files/[fileId]/page.tsx @@ -1,3 +1,4 @@ +import { Suspense } from 'react' import type { Metadata } from 'next' import { Files } from '../files' @@ -6,4 +7,10 @@ export const metadata: Metadata = { robots: { index: false }, } -export default Files +export default function FilesFilePage() { + return ( + + + + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/data-table.tsx b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/data-table.tsx index 5e31edcfb55..672811de475 100644 --- a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/data-table.tsx +++ b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/data-table.tsx @@ -1,11 +1,98 @@ -import { memo } from 'react' +'use client' + +import { forwardRef, memo, useCallback, useImperativeHandle, useRef, useState } from 'react' +import { cn } from '@/lib/core/utils/cn' + +interface EditConfig { + onCellChange: (row: number, col: number, value: string) => void + onHeaderChange: (col: number, value: string) => void +} interface DataTableProps { headers: string[] rows: string[][] + editConfig?: EditConfig +} + +export interface DataTableHandle { + commitEdit: () => void } -export const DataTable = memo(function DataTable({ headers, rows }: DataTableProps) { +type EditingCell = { row: number; col: number } | null + +const DataTableBase = forwardRef(function DataTable( + { headers, rows, editConfig }, + ref +) { 
+ const [editingCell, setEditingCell] = useState(null) + const [editValue, setEditValue] = useState('') + + const editStateRef = useRef({ editingCell, editValue, editConfig }) + editStateRef.current = { editingCell, editValue, editConfig } + + // Prevents double-commit if onBlur and imperative commitEdit fire concurrently + const isCommittedRef = useRef(false) + + useImperativeHandle( + ref, + () => ({ + commitEdit: () => { + if (isCommittedRef.current) return + const { editingCell, editValue, editConfig } = editStateRef.current + if (!editingCell || !editConfig) return + isCommittedRef.current = true + const { row, col } = editingCell + if (row === -1) { + editConfig.onHeaderChange(col, editValue) + } else { + editConfig.onCellChange(row, col, editValue) + } + setEditingCell(null) + }, + }), + [] + ) + + const setInputRef = useCallback((node: HTMLInputElement | null) => { + if (node) { + node.focus() + node.select() + } + }, []) + + const startEdit = (row: number, col: number, currentValue: string) => { + if (!editConfig) return + isCommittedRef.current = false + setEditingCell({ row, col }) + setEditValue(currentValue) + } + + const commitEdit = () => { + if (isCommittedRef.current || !editingCell || !editConfig) return + isCommittedRef.current = true + const { row, col } = editingCell + if (row === -1) { + editConfig.onHeaderChange(col, editValue) + } else { + editConfig.onCellChange(row, col, editValue) + } + setEditingCell(null) + } + + const cancelEdit = () => setEditingCell(null) + + const handleKeyDown = (e: React.KeyboardEvent) => { + if (e.key === 'Enter' || e.key === 'Tab') { + e.preventDefault() + commitEdit() + } else if (e.key === 'Escape') { + cancelEdit() + } + } + + const isEditing = (row: number, col: number) => + editingCell?.row === row && editingCell?.col === col + return (
@@ -14,9 +101,24 @@ export const DataTable = memo(function DataTable({ headers, rows }: DataTablePro {headers.map((header, i) => ( ))} @@ -25,8 +127,26 @@ export const DataTable = memo(function DataTable({ headers, rows }: DataTablePro {rows.map((row, ri) => ( {headers.map((_, ci) => ( - ))} @@ -36,3 +156,5 @@ export const DataTable = memo(function DataTable({ headers, rows }: DataTablePro ) }) + +export const DataTable = memo(DataTableBase) diff --git a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/docx-preview.tsx b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/docx-preview.tsx new file mode 100644 index 00000000000..1aecee7906c --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/docx-preview.tsx @@ -0,0 +1,214 @@ +'use client' + +import { memo, useCallback, useEffect, useRef, useState } from 'react' +import { createLogger } from '@sim/logger' +import { toError } from '@sim/utils/errors' +import { cn } from '@/lib/core/utils/cn' +import type { WorkspaceFileRecord } from '@/lib/uploads/contexts/workspace' +import { useWorkspaceFileBinary } from '@/hooks/queries/workspace-files' +import { PDF_PAGE_SKELETON, PreviewError, resolvePreviewError } from './preview-shared' + +const logger = createLogger('DocxPreview') + +/** + * Fit the rendered docx pages to the host container width using a CSS scale. + * The library renders `
` at the document's natural page + * width (in cm), which overflows narrow panels. + */ +function fitDocxToContainer(host: HTMLElement) { + const wrapper = host.querySelector('.docx-wrapper') + if (!wrapper) return + const section = wrapper.querySelector('section.docx') + if (!section) return + + wrapper.style.transform = '' + wrapper.style.transformOrigin = 'top left' + wrapper.style.width = '' + wrapper.style.marginRight = '' + wrapper.style.marginBottom = '' + + const naturalPageWidth = section.offsetWidth + if (!naturalPageWidth) return + + const wrapperStyle = window.getComputedStyle(wrapper) + const horizontalPadding = + Number.parseFloat(wrapperStyle.paddingLeft) + Number.parseFloat(wrapperStyle.paddingRight) + const naturalWrapperWidth = naturalPageWidth + horizontalPadding + const available = host.clientWidth + const scale = Math.min(1, available / naturalWrapperWidth) + + if (scale >= 1) return + + wrapper.style.width = `${naturalWrapperWidth}px` + wrapper.style.transform = `scale(${scale})` + const naturalHeight = wrapper.offsetHeight + wrapper.style.marginRight = `${(scale - 1) * naturalWrapperWidth}px` + wrapper.style.marginBottom = `${(scale - 1) * naturalHeight}px` +} + +export const DocxPreview = memo(function DocxPreview({ + file, + workspaceId, + streamingContent, +}: { + file: WorkspaceFileRecord + workspaceId: string + streamingContent?: string +}) { + const containerRef = useRef(null) + const lastSuccessfulHtmlRef = useRef('') + const { + data: fileData, + isLoading, + error: fetchError, + } = useWorkspaceFileBinary(workspaceId, file.id, file.key) + const [renderError, setRenderError] = useState(null) + const [rendering, setRendering] = useState(false) + const [hasRenderedPreview, setHasRenderedPreview] = useState(false) + + const applyPostRenderStyling = useCallback(() => { + const container = containerRef.current + if (!container) return + const wrapper = container.querySelector('.docx-wrapper') + if (wrapper) wrapper.style.background = 
'transparent' + container.querySelectorAll('section.docx').forEach((page) => { + page.style.boxShadow = 'var(--shadow-medium)' + }) + fitDocxToContainer(container) + }, []) + + useEffect(() => { + const container = containerRef.current + if (!container) return + const observer = new ResizeObserver(() => fitDocxToContainer(container)) + observer.observe(container) + return () => observer.disconnect() + }, []) + + useEffect(() => { + if (!containerRef.current || !fileData || streamingContent !== undefined) return + + let cancelled = false + + async function render() { + try { + setRendering(true) + const { renderAsync } = await import('docx-preview') + if (cancelled || !containerRef.current) return + setRenderError(null) + containerRef.current.innerHTML = '' + await renderAsync(fileData, containerRef.current, undefined, { + inWrapper: true, + ignoreWidth: false, + ignoreHeight: false, + }) + if (!cancelled && containerRef.current) { + applyPostRenderStyling() + lastSuccessfulHtmlRef.current = containerRef.current.innerHTML + setHasRenderedPreview(true) + } + } catch (err) { + if (!cancelled) { + const msg = toError(err).message || 'Failed to render document' + logger.error('DOCX render failed', { error: msg }) + setRenderError(msg) + } + } finally { + if (!cancelled) { + setRendering(false) + } + } + } + + render() + return () => { + cancelled = true + } + }, [fileData, streamingContent, applyPostRenderStyling]) + + useEffect(() => { + if (streamingContent === undefined || !containerRef.current) return + + let cancelled = false + const controller = new AbortController() + + const debounceTimer = setTimeout(async () => { + const container = containerRef.current + if (!container || cancelled) return + + const previousHtml = lastSuccessfulHtmlRef.current + + try { + setRendering(true) + + const response = await fetch(`/api/workspaces/${workspaceId}/docx/preview`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ code: 
streamingContent }), + signal: controller.signal, + }) + if (!response.ok) { + const err = await response.json().catch(() => ({ error: 'Preview failed' })) + throw new Error(err.error || 'Preview failed') + } + + const arrayBuffer = await response.arrayBuffer() + if (cancelled || !containerRef.current) return + + const { renderAsync } = await import('docx-preview') + if (cancelled || !containerRef.current) return + + containerRef.current.innerHTML = '' + await renderAsync(new Uint8Array(arrayBuffer), containerRef.current, undefined, { + inWrapper: true, + ignoreWidth: false, + ignoreHeight: false, + }) + + if (!cancelled && containerRef.current) { + applyPostRenderStyling() + lastSuccessfulHtmlRef.current = containerRef.current.innerHTML + setHasRenderedPreview(true) + } + } catch (err) { + if (!cancelled && !(err instanceof DOMException && err.name === 'AbortError')) { + if (containerRef.current && previousHtml) { + containerRef.current.innerHTML = previousHtml + applyPostRenderStyling() + setHasRenderedPreview(true) + } + const msg = toError(err).message || 'Failed to render document' + logger.info('Transient DOCX streaming preview error (suppressed)', { error: msg }) + } + } finally { + if (!cancelled) { + setRendering(false) + } + } + }, 500) + + return () => { + cancelled = true + clearTimeout(debounceTimer) + controller.abort() + } + }, [streamingContent, workspaceId, applyPostRenderStyling]) + + const error = streamingContent !== undefined ? null : resolvePreviewError(fetchError, renderError) + if (error) return + + const showSkeleton = + !hasRenderedPreview && (streamingContent !== undefined || isLoading || rendering) + + return ( +
+ {showSkeleton && ( +
{PDF_PAGE_SKELETON}
+ )} +
+
+ ) +}) diff --git a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/file-category.test.ts b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/file-category.test.ts new file mode 100644 index 00000000000..5cfd8498edc --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/file-category.test.ts @@ -0,0 +1,233 @@ +/** + * @vitest-environment node + */ +import { describe, expect, it, vi } from 'vitest' + +vi.mock('@/lib/uploads/utils/validation', () => ({ + SUPPORTED_CODE_EXTENSIONS: ['js', 'ts', 'py', 'go', 'rs', 'sh', 'sql'], +})) + +vi.mock('@/lib/uploads/utils/file-utils', () => ({ + getFileExtension: (filename: string): string => { + const lastDot = filename.lastIndexOf('.') + return lastDot !== -1 ? filename.slice(lastDot + 1).toLowerCase() : '' + }, +})) + +import { resolveFileCategory } from './file-category' + +describe('resolveFileCategory — MIME type routing', () => { + describe('text-editable', () => { + it.each([ + 'text/plain', + 'text/markdown', + 'application/json', + 'application/x-yaml', + 'text/csv', + 'text/html', + 'text/xml', + 'application/xml', + 'text/css', + 'text/javascript', + 'application/javascript', + 'application/typescript', + 'application/toml', + 'text/x-python', + 'text/x-sh', + 'text/x-sql', + 'image/svg+xml', + 'text/x-mermaid', + ])('%s → text-editable', (mime) => { + expect(resolveFileCategory(mime, 'file.txt')).toBe('text-editable') + }) + }) + + describe('iframe-previewable (PDF)', () => { + it('application/pdf → iframe-previewable', () => { + expect(resolveFileCategory('application/pdf', 'doc.pdf')).toBe('iframe-previewable') + }) + + it('text/x-pdflibjs → iframe-previewable', () => { + expect(resolveFileCategory('text/x-pdflibjs', 'generated.pdf')).toBe('iframe-previewable') + }) + }) + + describe('image-previewable', () => { + it.each(['image/png', 'image/jpeg', 'image/gif', 'image/webp'])( + '%s → image-previewable', + (mime) => { + 
expect(resolveFileCategory(mime, 'img.png')).toBe('image-previewable') + } + ) + }) + + describe('audio-previewable', () => { + it.each([ + 'audio/mpeg', + 'audio/mp4', + 'audio/wav', + 'audio/webm', + 'audio/ogg', + 'audio/flac', + 'audio/aac', + 'audio/opus', + 'audio/x-m4a', + ])('%s → audio-previewable', (mime) => { + expect(resolveFileCategory(mime, 'audio.mp3')).toBe('audio-previewable') + }) + }) + + describe('video-previewable', () => { + it.each(['video/mp4', 'video/quicktime', 'video/x-msvideo', 'video/x-matroska', 'video/webm'])( + '%s → video-previewable', + (mime) => { + expect(resolveFileCategory(mime, 'video.mp4')).toBe('video-previewable') + } + ) + }) + + describe('docx-previewable', () => { + it('application/vnd.openxmlformats-officedocument.wordprocessingml.document → docx-previewable', () => { + expect( + resolveFileCategory( + 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + 'doc.docx' + ) + ).toBe('docx-previewable') + }) + + it('text/x-docxjs → docx-previewable', () => { + expect(resolveFileCategory('text/x-docxjs', 'doc.docx')).toBe('docx-previewable') + }) + }) + + describe('pptx-previewable', () => { + it('application/vnd.openxmlformats-officedocument.presentationml.presentation → pptx-previewable', () => { + expect( + resolveFileCategory( + 'application/vnd.openxmlformats-officedocument.presentationml.presentation', + 'deck.pptx' + ) + ).toBe('pptx-previewable') + }) + + it('text/x-pptxgenjs → pptx-previewable', () => { + expect(resolveFileCategory('text/x-pptxgenjs', 'deck.pptx')).toBe('pptx-previewable') + }) + }) + + describe('xlsx-previewable', () => { + it('application/vnd.openxmlformats-officedocument.spreadsheetml.sheet → xlsx-previewable', () => { + expect( + resolveFileCategory( + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', + 'data.xlsx' + ) + ).toBe('xlsx-previewable') + }) + }) +}) + +describe('resolveFileCategory — extension fallback', () => { + describe('text-editable 
extensions', () => { + it.each(['md', 'txt', 'json', 'yaml', 'yml', 'csv', 'html', 'htm', 'svg', 'mmd'])( + '.%s → text-editable', + (ext) => { + expect(resolveFileCategory(null, `file.${ext}`)).toBe('text-editable') + } + ) + }) + + describe('code extensions from SUPPORTED_CODE_EXTENSIONS', () => { + it.each(['js', 'ts', 'py', 'go', 'rs', 'sh', 'sql'])('.%s → text-editable', (ext) => { + expect(resolveFileCategory(null, `file.${ext}`)).toBe('text-editable') + }) + }) + + describe('pdf extension', () => { + it('.pdf → iframe-previewable', () => { + expect(resolveFileCategory(null, 'document.pdf')).toBe('iframe-previewable') + }) + }) + + describe('image extensions', () => { + it.each(['png', 'jpg', 'jpeg', 'gif', 'webp'])('.%s → image-previewable', (ext) => { + expect(resolveFileCategory(null, `image.${ext}`)).toBe('image-previewable') + }) + }) + + describe('audio extensions', () => { + it.each(['mp3', 'm4a', 'wav', 'ogg', 'flac', 'aac', 'opus'])( + '.%s → audio-previewable', + (ext) => { + expect(resolveFileCategory(null, `audio.${ext}`)).toBe('audio-previewable') + } + ) + }) + + describe('video extensions', () => { + it.each(['mp4', 'mov', 'avi', 'mkv', 'webm'])('.%s → video-previewable', (ext) => { + expect(resolveFileCategory(null, `video.${ext}`)).toBe('video-previewable') + }) + }) + + describe('docx extension', () => { + it('.docx → docx-previewable', () => { + expect(resolveFileCategory(null, 'doc.docx')).toBe('docx-previewable') + }) + }) + + describe('pptx extension', () => { + it('.pptx → pptx-previewable', () => { + expect(resolveFileCategory(null, 'deck.pptx')).toBe('pptx-previewable') + }) + }) + + describe('xlsx extension', () => { + it('.xlsx → xlsx-previewable', () => { + expect(resolveFileCategory(null, 'data.xlsx')).toBe('xlsx-previewable') + }) + }) + + describe('unsupported', () => { + it('unknown extension → unsupported', () => { + expect(resolveFileCategory(null, 'file.xyz')).toBe('unsupported') + }) + + it('unknown mime with unknown 
extension → unsupported', () => { + expect(resolveFileCategory('application/octet-stream', 'file.bin')).toBe('unsupported') + }) + + it('no extension, no mime → unsupported', () => { + expect(resolveFileCategory(null, 'LICENSE')).toBe('unsupported') + }) + }) +}) + +describe('resolveFileCategory — MIME priority', () => { + it('text/plain MIME + .pdf extension → text-editable (MIME wins)', () => { + expect(resolveFileCategory('text/plain', 'notes.pdf')).toBe('text-editable') + }) + + it('application/pdf MIME + .txt extension → iframe-previewable (MIME wins)', () => { + expect(resolveFileCategory('application/pdf', 'disguised.txt')).toBe('iframe-previewable') + }) + + it('null MIME falls through to extension routing', () => { + expect(resolveFileCategory(null, 'data.xlsx')).toBe('xlsx-previewable') + }) + + it('unknown MIME falls through to extension routing', () => { + expect(resolveFileCategory('application/octet-stream', 'data.xlsx')).toBe('xlsx-previewable') + }) +}) + +describe('resolveFileCategory — extension case', () => { + it('recognises uppercase extension via extension lookup (getFileExtension lowercases)', () => { + expect(resolveFileCategory(null, 'README.MD')).toBe('text-editable') + }) + + it('handles mixed-case correctly for json', () => { + expect(resolveFileCategory(null, 'config.JSON')).toBe('text-editable') + }) +}) diff --git a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/file-category.ts b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/file-category.ts new file mode 100644 index 00000000000..2eb2c96810b --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/file-category.ts @@ -0,0 +1,117 @@ +import { getFileExtension } from '@/lib/uploads/utils/file-utils' +import { SUPPORTED_CODE_EXTENSIONS } from '@/lib/uploads/utils/validation' + +const TEXT_EDITABLE_MIME_TYPES = new Set([ + 'text/markdown', + 'text/plain', + 'application/json', + 'application/x-yaml', + 'text/csv', + 
'text/html', + 'text/xml', + 'application/xml', + 'text/css', + 'text/javascript', + 'application/javascript', + 'application/typescript', + 'application/toml', + 'text/x-python', + 'text/x-sh', + 'text/x-sql', + 'image/svg+xml', + 'text/x-mermaid', +]) + +const TEXT_EDITABLE_EXTENSIONS = new Set([ + 'md', + 'txt', + 'json', + 'yaml', + 'yml', + 'csv', + 'html', + 'htm', + 'svg', + 'mmd', + ...SUPPORTED_CODE_EXTENSIONS, +]) + +const IFRAME_PREVIEWABLE_MIME_TYPES = new Set(['application/pdf', 'text/x-pdflibjs']) +const IFRAME_PREVIEWABLE_EXTENSIONS = new Set(['pdf']) + +const IMAGE_PREVIEWABLE_MIME_TYPES = new Set(['image/png', 'image/jpeg', 'image/gif', 'image/webp']) +const IMAGE_PREVIEWABLE_EXTENSIONS = new Set(['png', 'jpg', 'jpeg', 'gif', 'webp']) + +const AUDIO_PREVIEWABLE_MIME_TYPES = new Set([ + 'audio/mpeg', + 'audio/mp4', + 'audio/wav', + 'audio/webm', + 'audio/ogg', + 'audio/flac', + 'audio/aac', + 'audio/opus', + 'audio/x-m4a', +]) +const AUDIO_PREVIEWABLE_EXTENSIONS = new Set(['mp3', 'm4a', 'wav', 'ogg', 'flac', 'aac', 'opus']) + +const VIDEO_PREVIEWABLE_MIME_TYPES = new Set([ + 'video/mp4', + 'video/quicktime', + 'video/x-msvideo', + 'video/x-matroska', + 'video/webm', +]) +const VIDEO_PREVIEWABLE_EXTENSIONS = new Set(['mp4', 'mov', 'avi', 'mkv', 'webm']) + +const PPTX_PREVIEWABLE_MIME_TYPES = new Set([ + 'application/vnd.openxmlformats-officedocument.presentationml.presentation', + 'text/x-pptxgenjs', +]) +const PPTX_PREVIEWABLE_EXTENSIONS = new Set(['pptx']) + +const DOCX_PREVIEWABLE_MIME_TYPES = new Set([ + 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + 'text/x-docxjs', +]) +const DOCX_PREVIEWABLE_EXTENSIONS = new Set(['docx']) + +const XLSX_PREVIEWABLE_MIME_TYPES = new Set([ + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', +]) +const XLSX_PREVIEWABLE_EXTENSIONS = new Set(['xlsx']) + +export type FileCategory = + | 'text-editable' + | 'iframe-previewable' + | 'image-previewable' + | 
'audio-previewable' + | 'video-previewable' + | 'pptx-previewable' + | 'docx-previewable' + | 'xlsx-previewable' + | 'unsupported' + +export function resolveFileCategory(mimeType: string | null, filename: string): FileCategory { + if (mimeType && TEXT_EDITABLE_MIME_TYPES.has(mimeType)) return 'text-editable' + if (mimeType && IFRAME_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'iframe-previewable' + if (mimeType && IMAGE_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'image-previewable' + if (mimeType && AUDIO_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'audio-previewable' + if (mimeType && VIDEO_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'video-previewable' + if (mimeType && DOCX_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'docx-previewable' + if (mimeType && PPTX_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'pptx-previewable' + if (mimeType && XLSX_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'xlsx-previewable' + + const ext = getFileExtension(filename) + const nameKey = ext || filename.toLowerCase() + if (TEXT_EDITABLE_EXTENSIONS.has(nameKey)) return 'text-editable' + if (IFRAME_PREVIEWABLE_EXTENSIONS.has(ext)) return 'iframe-previewable' + if (IMAGE_PREVIEWABLE_EXTENSIONS.has(ext)) return 'image-previewable' + if (AUDIO_PREVIEWABLE_EXTENSIONS.has(ext)) return 'audio-previewable' + if (VIDEO_PREVIEWABLE_EXTENSIONS.has(ext)) return 'video-previewable' + if (DOCX_PREVIEWABLE_EXTENSIONS.has(ext)) return 'docx-previewable' + if (PPTX_PREVIEWABLE_EXTENSIONS.has(ext)) return 'pptx-previewable' + if (XLSX_PREVIEWABLE_EXTENSIONS.has(ext)) return 'xlsx-previewable' + + return 'unsupported' +} diff --git a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/file-viewer.tsx b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/file-viewer.tsx index 02e3c683ae9..55b00d8e4ed 100644 --- a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/file-viewer.tsx +++ 
b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/file-viewer.tsx @@ -1,190 +1,33 @@ 'use client' -import { - memo, - type ReactElement, - useCallback, - useEffect, - useMemo, - useReducer, - useRef, - useState, -} from 'react' -import Editor from 'react-simple-code-editor' -import 'prismjs/components/prism-bash' -import 'prismjs/components/prism-css' -import 'prismjs/components/prism-markup' -import 'prismjs/components/prism-sql' -import 'prismjs/components/prism-typescript' -import 'prismjs/components/prism-yaml' +import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react' import { createLogger } from '@sim/logger' -import { ZoomIn, ZoomOut } from 'lucide-react' -import { - CODE_LINE_HEIGHT_PX, - Code as CodeEditor, - calculateGutterWidth, - getCodeEditorProps, - highlight, - languages, - Skeleton, -} from '@/components/emcn' -import { cn } from '@/lib/core/utils/cn' +import { toError } from '@sim/utils/errors' +import dynamic from 'next/dynamic' +import { Skeleton } from '@/components/emcn' import type { WorkspaceFileRecord } from '@/lib/uploads/contexts/workspace' import { getFileExtension } from '@/lib/uploads/utils/file-utils' -import { SUPPORTED_CODE_EXTENSIONS } from '@/lib/uploads/utils/validation' -import { - useUpdateWorkspaceFileContent, - useWorkspaceFileBinary, - useWorkspaceFileContent, -} from '@/hooks/queries/workspace-files' -import { useAutosave } from '@/hooks/use-autosave' -import { DataTable } from './data-table' -import { PreviewPanel, resolvePreviewType } from './preview-panel' +import { useWorkspaceFileBinary } from '@/hooks/queries/workspace-files' +import { resolveFileCategory } from './file-category' +import type { StreamingMode } from './text-editor-state' + +export type { StreamingMode } from './text-editor-state' + +import { DocxPreview } from './docx-preview' +import { ImagePreview } from './image-preview' +import type { PdfDocumentSource } from './pdf-viewer' +import { PptxPreview } from 
'./pptx-preview' +import { resolvePreviewType } from './preview-panel' +import { PDF_PAGE_SKELETON, PreviewError, resolvePreviewError } from './preview-shared' +import { TextEditor } from './text-editor' +import { XlsxPreview } from './xlsx-preview' + +const PdfViewerCore = dynamic(() => import('./pdf-viewer').then((m) => m.PdfViewerCore), { + ssr: false, +}) const logger = createLogger('FileViewer') -const SPLIT_MIN_PCT = 20 -const SPLIT_MAX_PCT = 80 -const SPLIT_DEFAULT_PCT = 50 - -const TEXT_EDITABLE_MIME_TYPES = new Set([ - 'text/markdown', - 'text/plain', - 'application/json', - 'application/x-yaml', - 'text/csv', - 'text/html', - 'text/xml', - 'application/xml', - 'text/css', - 'text/javascript', - 'application/javascript', - 'application/typescript', - 'application/toml', - 'text/x-python', - 'text/x-sh', - 'text/x-sql', - 'image/svg+xml', -]) - -const TEXT_EDITABLE_EXTENSIONS = new Set([ - 'md', - 'txt', - 'json', - 'yaml', - 'yml', - 'csv', - 'html', - 'htm', - 'svg', - ...SUPPORTED_CODE_EXTENSIONS, -]) - -const IFRAME_PREVIEWABLE_MIME_TYPES = new Set(['application/pdf', 'text/x-pdflibjs']) -const IFRAME_PREVIEWABLE_EXTENSIONS = new Set(['pdf']) - -const IMAGE_PREVIEWABLE_MIME_TYPES = new Set(['image/png', 'image/jpeg', 'image/gif', 'image/webp']) -const IMAGE_PREVIEWABLE_EXTENSIONS = new Set(['png', 'jpg', 'jpeg', 'gif', 'webp']) - -const PPTX_PREVIEWABLE_MIME_TYPES = new Set([ - 'application/vnd.openxmlformats-officedocument.presentationml.presentation', - 'text/x-pptxgenjs', -]) -const PPTX_PREVIEWABLE_EXTENSIONS = new Set(['pptx']) - -const DOCX_PREVIEWABLE_MIME_TYPES = new Set([ - 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', - 'text/x-docxjs', -]) -const DOCX_PREVIEWABLE_EXTENSIONS = new Set(['docx']) - -const XLSX_PREVIEWABLE_MIME_TYPES = new Set([ - 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', -]) -const XLSX_PREVIEWABLE_EXTENSIONS = new Set(['xlsx']) - -type FileCategory = - | 'text-editable' - 
| 'iframe-previewable' - | 'image-previewable' - | 'pptx-previewable' - | 'docx-previewable' - | 'xlsx-previewable' - | 'unsupported' - -type CodeEditorLanguage = - | 'javascript' - | 'json' - | 'python' - | 'typescript' - | 'bash' - | 'css' - | 'markup' - | 'sql' - | 'yaml' - -const CODE_EDITOR_LANGUAGE_BY_EXTENSION: Partial> = { - js: 'javascript', - jsx: 'javascript', - ts: 'typescript', - tsx: 'typescript', - py: 'python', - json: 'json', - sh: 'bash', - bash: 'bash', - zsh: 'bash', - fish: 'bash', - css: 'css', - scss: 'css', - less: 'css', - html: 'markup', - htm: 'markup', - xml: 'markup', - svg: 'markup', - sql: 'sql', - yaml: 'yaml', - yml: 'yaml', -} - -const CODE_EDITOR_LANGUAGE_BY_MIME: Partial> = { - 'text/javascript': 'javascript', - 'application/javascript': 'javascript', - 'text/typescript': 'typescript', - 'application/typescript': 'typescript', - 'text/x-python': 'python', - 'application/json': 'json', - 'text/x-shellscript': 'bash', - 'text/css': 'css', - 'text/html': 'markup', - 'text/xml': 'markup', - 'application/xml': 'markup', - 'image/svg+xml': 'markup', - 'text/x-sql': 'sql', - 'application/x-yaml': 'yaml', -} - -const CODE_EDITOR_LINE_HEIGHT_PX = CODE_LINE_HEIGHT_PX - -function resolveFileCategory(mimeType: string | null, filename: string): FileCategory { - if (mimeType && TEXT_EDITABLE_MIME_TYPES.has(mimeType)) return 'text-editable' - if (mimeType && IFRAME_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'iframe-previewable' - if (mimeType && IMAGE_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'image-previewable' - if (mimeType && DOCX_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'docx-previewable' - if (mimeType && PPTX_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'pptx-previewable' - if (mimeType && XLSX_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'xlsx-previewable' - - const ext = getFileExtension(filename) - const nameKey = ext || filename.toLowerCase() - if (TEXT_EDITABLE_EXTENSIONS.has(nameKey)) return 'text-editable' - if 
(IFRAME_PREVIEWABLE_EXTENSIONS.has(ext)) return 'iframe-previewable' - if (IMAGE_PREVIEWABLE_EXTENSIONS.has(ext)) return 'image-previewable' - if (DOCX_PREVIEWABLE_EXTENSIONS.has(ext)) return 'docx-previewable' - if (PPTX_PREVIEWABLE_EXTENSIONS.has(ext)) return 'pptx-previewable' - if (XLSX_PREVIEWABLE_EXTENSIONS.has(ext)) return 'xlsx-previewable' - - return 'unsupported' -} - export function isTextEditable(file: { type: string; name: string }): boolean { return resolveFileCategory(file.type, file.name) === 'text-editable' } @@ -194,13 +37,11 @@ export function isPreviewable(file: { type: string; name: string }): boolean { } export type PreviewMode = 'editor' | 'split' | 'preview' -type StreamingMode = 'append' | 'replace' interface FileViewerProps { file: WorkspaceFileRecord workspaceId: string canEdit: boolean - showPreview?: boolean previewMode?: PreviewMode autoFocus?: boolean onDirtyChange?: (isDirty: boolean) => void @@ -209,291 +50,13 @@ interface FileViewerProps { streamingContent?: string streamingMode?: StreamingMode disableStreamingAutoScroll?: boolean - useCodeRendererForCodeFiles?: boolean previewContextKey?: string } -function isCodeFile(file: { type: string; name: string }): boolean { - const ext = getFileExtension(file.name) - return ( - SUPPORTED_CODE_EXTENSIONS.includes(ext as (typeof SUPPORTED_CODE_EXTENSIONS)[number]) || - ext === 'html' || - ext === 'htm' || - ext === 'xml' || - ext === 'svg' - ) -} - -function resolveCodeEditorLanguage(file: { type: string; name: string }): CodeEditorLanguage { - const ext = getFileExtension(file.name) - return ( - CODE_EDITOR_LANGUAGE_BY_EXTENSION[ext] ?? - CODE_EDITOR_LANGUAGE_BY_MIME[file.type] ?? - (ext === 'json' ? 
'json' : 'javascript') - ) -} - -function areNumberArraysEqual(a: number[], b: number[]): boolean { - if (a === b) return true - if (a.length !== b.length) return false - for (let index = 0; index < a.length; index++) { - if (a[index] !== b[index]) { - return false - } - } - return true -} - -type TextEditorContentPhase = 'uninitialized' | 'ready' | 'streaming' | 'reconciling' - -interface TextEditorContentState { - phase: TextEditorContentPhase - content: string - savedContent: string - lastStreamedContent: string | null -} - -interface SyncTextEditorContentStateOptions { - canReconcileToFetchedContent: boolean - fetchedContent?: string - streamingContent?: string - streamingMode: StreamingMode -} - -type TextEditorContentAction = - | ({ type: 'sync-external' } & SyncTextEditorContentStateOptions) - | { type: 'edit'; content: string } - | { type: 'save-success'; content: string } - -const INITIAL_TEXT_EDITOR_CONTENT_STATE: TextEditorContentState = { - phase: 'uninitialized', - content: '', - savedContent: '', - lastStreamedContent: null, -} - -function resolveStreamingEditorContent( - fetchedContent: string | undefined, - streamingContent: string, - streamingMode: StreamingMode -): string { - if (streamingMode === 'replace' || fetchedContent === undefined) { - return streamingContent - } - - if ( - fetchedContent.endsWith(streamingContent) || - fetchedContent.endsWith(`\n${streamingContent}`) - ) { - return fetchedContent - } - - return `${fetchedContent}\n${streamingContent}` -} - -function finalizeTextEditorContentState( - state: TextEditorContentState, - nextContent: string -): TextEditorContentState { - if ( - state.phase === 'ready' && - state.content === nextContent && - state.savedContent === nextContent && - state.lastStreamedContent === null - ) { - return state - } - - return { - phase: 'ready', - content: nextContent, - savedContent: nextContent, - lastStreamedContent: null, - } -} - -function moveTextEditorContentStateToStreaming( - state: 
TextEditorContentState, - nextContent: string -): TextEditorContentState { - if ( - state.phase === 'streaming' && - state.content === nextContent && - state.lastStreamedContent === nextContent - ) { - return state - } - - return { - ...state, - phase: 'streaming', - content: nextContent, - lastStreamedContent: nextContent, - } -} - -function moveTextEditorContentStateToReconcile( - state: TextEditorContentState -): TextEditorContentState { - if (state.phase === 'reconciling') { - return state - } - - return { - ...state, - phase: 'reconciling', - } -} - -function syncTextEditorContentState( - state: TextEditorContentState, - options: SyncTextEditorContentStateOptions -): TextEditorContentState { - const { canReconcileToFetchedContent, fetchedContent, streamingContent, streamingMode } = options - - if (streamingContent !== undefined) { - const nextContent = resolveStreamingEditorContent( - fetchedContent, - streamingContent, - streamingMode - ) - const fetchedMatchesNextContent = fetchedContent !== undefined && fetchedContent === nextContent - const fetchedMatchesLastStreamedContent = - fetchedContent !== undefined && - state.lastStreamedContent !== null && - fetchedContent === state.lastStreamedContent - const hasFetchedAdvanced = fetchedContent !== undefined && fetchedContent !== state.savedContent - - if ( - (state.phase === 'streaming' || state.phase === 'reconciling') && - (hasFetchedAdvanced || fetchedMatchesLastStreamedContent || fetchedMatchesNextContent) - ) { - return finalizeTextEditorContentState(state, fetchedContent) - } - - if ( - state.phase === 'ready' && - state.content === state.savedContent && - fetchedMatchesNextContent && - fetchedContent !== undefined - ) { - return finalizeTextEditorContentState(state, fetchedContent) - } - - return moveTextEditorContentStateToStreaming(state, nextContent) - } - - if (state.phase === 'streaming' || state.phase === 'reconciling') { - if (!canReconcileToFetchedContent) { - return 
finalizeTextEditorContentState(state, state.content) - } - - if (fetchedContent !== undefined) { - const hasFetchedAdvanced = fetchedContent !== state.savedContent - const fetchedMatchesLastStreamedContent = - state.lastStreamedContent !== null && fetchedContent === state.lastStreamedContent - - if (hasFetchedAdvanced || fetchedMatchesLastStreamedContent) { - return finalizeTextEditorContentState(state, fetchedContent) - } - } - - return moveTextEditorContentStateToReconcile(state) - } - - if (fetchedContent === undefined) { - return state - } - - if (state.phase === 'uninitialized') { - return finalizeTextEditorContentState(state, fetchedContent) - } - - if (fetchedContent === state.savedContent) { - return state - } - - if (state.content === state.savedContent) { - return finalizeTextEditorContentState(state, fetchedContent) - } - - return state -} - -function textEditorContentReducer( - state: TextEditorContentState, - action: TextEditorContentAction -): TextEditorContentState { - switch (action.type) { - case 'sync-external': - return syncTextEditorContentState(state, action) - case 'edit': - if (state.phase !== 'ready' || action.content === state.content) { - return state - } - return { - ...state, - content: action.content, - } - case 'save-success': - if ( - state.phase === 'ready' && - state.content === action.content && - state.savedContent === action.content && - state.lastStreamedContent === null - ) { - return state - } - return { - ...state, - phase: 'ready', - content: action.content, - savedContent: action.content, - lastStreamedContent: null, - } - default: - return state - } -} - -function useTextEditorContentState(options: SyncTextEditorContentStateOptions) { - const [state, dispatch] = useReducer(textEditorContentReducer, INITIAL_TEXT_EDITOR_CONTENT_STATE) - - useEffect(() => { - dispatch({ - type: 'sync-external', - ...options, - }) - }, [ - options.canReconcileToFetchedContent, - options.fetchedContent, - options.streamingContent, - 
options.streamingMode, - ]) - - const setDraftContent = useCallback((content: string) => { - dispatch({ type: 'edit', content }) - }, []) - - const markSavedContent = useCallback((content: string) => { - dispatch({ type: 'save-success', content }) - }, []) - - return { - content: state.content, - savedContent: state.savedContent, - isInitialized: state.phase !== 'uninitialized', - isStreamInteractionLocked: state.phase === 'streaming' || state.phase === 'reconciling', - setDraftContent, - markSavedContent, - } -} - export function FileViewer({ file, workspaceId, canEdit, - showPreview, previewMode, autoFocus, onDirtyChange, @@ -502,7 +65,6 @@ export function FileViewer({ streamingContent, streamingMode, disableStreamingAutoScroll = false, - useCodeRendererForCodeFiles = false, previewContextKey, }: FileViewerProps) { const category = resolveFileCategory(file.type, file.name) @@ -513,7 +75,7 @@ export function FileViewer({ file={file} workspaceId={workspaceId} canEdit={canEdit} - previewMode={previewMode ?? (showPreview ? 'preview' : 'editor')} + previewMode={previewMode ?? 
'editor'} autoFocus={autoFocus} onDirtyChange={onDirtyChange} onSaveStatusChange={onSaveStatusChange} @@ -521,7 +83,6 @@ export function FileViewer({ streamingContent={streamingContent} streamingMode={streamingMode} disableStreamingAutoScroll={disableStreamingAutoScroll} - useCodeRendererForCodeFiles={useCodeRendererForCodeFiles} previewContextKey={previewContextKey} /> ) @@ -529,667 +90,221 @@ export function FileViewer({ if (category === 'iframe-previewable') { return ( - + ) } if (category === 'image-previewable') { - return + return + } + + if (category === 'audio-previewable') { + return + } + + if (category === 'video-previewable') { + return } if (category === 'docx-previewable') { - return + return ( + + ) } if (category === 'pptx-previewable') { - return + return ( + + ) } if (category === 'xlsx-previewable') { - return + return ( + + ) } return } -interface TextEditorProps { - file: WorkspaceFileRecord - workspaceId: string - canEdit: boolean - previewMode: PreviewMode - autoFocus?: boolean - onDirtyChange?: (isDirty: boolean) => void - onSaveStatusChange?: (status: 'idle' | 'saving' | 'saved' | 'error') => void - saveRef?: React.MutableRefObject<(() => Promise) | null> - streamingContent?: string - streamingMode?: StreamingMode - disableStreamingAutoScroll: boolean - useCodeRendererForCodeFiles?: boolean - previewContextKey?: string -} - -function TextEditor({ +const IframePreview = memo(function IframePreview({ file, workspaceId, - canEdit, - previewMode, - autoFocus, - onDirtyChange, - onSaveStatusChange, - saveRef, streamingContent, - streamingMode = 'append', - disableStreamingAutoScroll, - useCodeRendererForCodeFiles = false, - previewContextKey, -}: TextEditorProps) { - const textareaRef = useRef(null) - const containerRef = useRef(null) - const codeEditorRef = useRef(null) - const codeScrollRef = useRef(null) - const hasAutoFocusedRef = useRef(false) - - const [splitPct, setSplitPct] = useState(SPLIT_DEFAULT_PCT) - const [isResizing, 
setIsResizing] = useState(false) - const [visualLineHeights, setVisualLineHeights] = useState([]) - const [activeLineNumber, setActiveLineNumber] = useState(1) - - const { - data: fetchedContent, - isLoading, - error, - } = useWorkspaceFileContent( - workspaceId, - file.id, - file.key, - file.type === 'text/x-pptxgenjs' || - file.type === 'text/x-docxjs' || - file.type === 'text/x-pdflibjs' - ) - - const updateContent = useUpdateWorkspaceFileContent() - const updateContentRef = useRef(updateContent) - updateContentRef.current = updateContent - - const shouldUseCodeRenderer = useCodeRendererForCodeFiles && isCodeFile(file) - const codeLanguage = useMemo(() => resolveCodeEditorLanguage(file), [file.name, file.type]) - const onDirtyChangeRef = useRef(onDirtyChange) - const onSaveStatusChangeRef = useRef(onSaveStatusChange) - onDirtyChangeRef.current = onDirtyChange - onSaveStatusChangeRef.current = onSaveStatusChange - - const { - content, - savedContent, - isInitialized, - isStreamInteractionLocked, - setDraftContent, - markSavedContent, - } = useTextEditorContentState({ - canReconcileToFetchedContent: file.key.length > 0, - fetchedContent, - streamingContent, - streamingMode, - }) +}: { + file: WorkspaceFileRecord + workspaceId: string + streamingContent?: string +}) { + const [streamingBuffer, setStreamingBuffer] = useState(null) + const streamingBufferRef = useRef(null) + const streamingBufferSeqRef = useRef(0) + const [streamingBufferSeq, setStreamingBufferSeq] = useState(0) + const [rendering, setRendering] = useState(false) useEffect(() => { - if (!autoFocus || !isInitialized || hasAutoFocusedRef.current) { - return - } - - hasAutoFocusedRef.current = true - requestAnimationFrame(() => { - const editorTextarea = codeEditorRef.current?.querySelector('textarea') - if (editorTextarea instanceof HTMLTextAreaElement) { - editorTextarea.focus() - return - } - textareaRef.current?.focus() - }) - }, [autoFocus, isInitialized]) - - const handleContentChange = 
useCallback( - (value: string) => { - if (value === content) { - return - } - setDraftContent(value) - }, - [content, setDraftContent] - ) - - const onSave = useCallback(async () => { - if (content === savedContent) return - - await updateContentRef.current.mutateAsync({ - workspaceId, - fileId: file.id, - content, - }) - markSavedContent(content) - }, [content, file.id, markSavedContent, savedContent, workspaceId]) + if (streamingContent === undefined) return - const { saveStatus, saveImmediately, isDirty } = useAutosave({ - content, - savedContent, - onSave, - enabled: canEdit && isInitialized && !isStreamInteractionLocked, - }) + let cancelled = false + const controller = new AbortController() - useEffect(() => { - onDirtyChangeRef.current?.(isDirty) - }, [isDirty]) + const debounceTimer = setTimeout(async () => { + if (cancelled) return - useEffect(() => { - onSaveStatusChangeRef.current?.(saveStatus) - }, [saveStatus]) + try { + setRendering(true) - useEffect(() => { - if (!saveRef) { - return - } + const response = await fetch(`/api/workspaces/${workspaceId}/pdf/preview`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ code: streamingContent }), + signal: controller.signal, + }) + if (!response.ok) { + const err = await response.json().catch(() => ({ error: 'Preview failed' })) + throw new Error(err.error || 'Preview failed') + } - saveRef.current = saveImmediately + const buf = await response.arrayBuffer() + if (cancelled) return - return () => { - if (saveRef.current === saveImmediately) { - saveRef.current = null + streamingBufferRef.current = buf + streamingBufferSeqRef.current += 1 + setStreamingBuffer(buf) + setStreamingBufferSeq(streamingBufferSeqRef.current) + } catch (err) { + if (!cancelled && !(err instanceof DOMException && err.name === 'AbortError')) { + const msg = toError(err).message || 'Failed to render PDF' + logger.info('Transient PDF streaming preview error (suppressed)', { error: msg }) + } 
+ } finally { + if (!cancelled) setRendering(false) } - } - }, [saveImmediately, saveRef]) - - useEffect(() => { - if (!isResizing) return - - const handleMouseMove = (e: MouseEvent) => { - const container = containerRef.current - if (!container) return - const rect = container.getBoundingClientRect() - const pct = ((e.clientX - rect.left) / rect.width) * 100 - setSplitPct(Math.min(SPLIT_MAX_PCT, Math.max(SPLIT_MIN_PCT, pct))) - } - - const handleMouseUp = () => setIsResizing(false) - - document.addEventListener('mousemove', handleMouseMove) - document.addEventListener('mouseup', handleMouseUp) - document.body.style.cursor = 'ew-resize' - document.body.style.userSelect = 'none' + }, 500) return () => { - document.removeEventListener('mousemove', handleMouseMove) - document.removeEventListener('mouseup', handleMouseUp) - document.body.style.cursor = '' - document.body.style.userSelect = '' + cancelled = true + clearTimeout(debounceTimer) + controller.abort() } - }, [isResizing]) + }, [streamingContent, workspaceId]) - const handleCheckboxToggle = useCallback( - (checkboxIndex: number, checked: boolean) => { - const toggled = toggleMarkdownCheckbox(content, checkboxIndex, checked) - if (toggled !== content) { - handleContentChange(toggled) - } - }, - [content, handleContentChange] + const staticSource = useMemo( + () => ({ + kind: 'url', + url: `/api/files/serve/${encodeURIComponent(file.key)}?context=workspace`, + }), + [file.key] ) - const isStreaming = isStreamInteractionLocked - const isEditorReadOnly = isStreamInteractionLocked || !canEdit - const renderedContent = content - const gutterWidthPx = useMemo(() => { - const lineCount = renderedContent.split('\n').length - return calculateGutterWidth(lineCount) - }, [renderedContent]) - const sharedCodeEditorProps = useMemo( - () => - getCodeEditorProps({ - disabled: isEditorReadOnly, - isStreaming: isStreaming, - }), - [isEditorReadOnly, isStreaming] - ) - const highlightCode = useMemo(() => { - return (value: 
string) => { - const grammar = languages[codeLanguage] || languages.javascript - return highlight(value, grammar, codeLanguage) - } - }, [codeLanguage]) - const handleCodeContentChange = useCallback( - (value: string) => { - if (isEditorReadOnly) return - handleContentChange(value) - }, - [handleContentChange, isEditorReadOnly] + const streamingSource = useMemo( + () => (streamingBuffer ? { kind: 'buffer', buffer: streamingBuffer } : null), + [streamingBuffer] ) - const textareaStuckRef = useRef(true) - const renderedContentRef = useRef(renderedContent) - renderedContentRef.current = renderedContent + if (streamingContent !== undefined) { + if (!streamingSource) { + return
{PDF_PAGE_SKELETON}
+ } + return + } - useEffect(() => { - if (!shouldUseCodeRenderer) return - const textarea = codeEditorRef.current?.querySelector('textarea') - if (!(textarea instanceof HTMLTextAreaElement)) return + return +}) - const updateActiveLineNumber = () => { - const pos = textarea.selectionStart - const textBeforeCursor = renderedContentRef.current.substring(0, pos) - const nextActiveLineNumber = textBeforeCursor.split('\n').length - setActiveLineNumber((currentLineNumber) => - currentLineNumber === nextActiveLineNumber ? currentLineNumber : nextActiveLineNumber - ) - } +function useBlobUrl(workspaceId: string, fileId: string, fileKey: string) { + const { data: fileData, isLoading, error } = useWorkspaceFileBinary(workspaceId, fileId, fileKey) + const [blobUrl, setBlobUrl] = useState(null) + const blobUrlRef = useRef(null) - textarea.addEventListener('click', updateActiveLineNumber) - textarea.addEventListener('keyup', updateActiveLineNumber) - textarea.addEventListener('focus', updateActiveLineNumber) + const replaceBlobUrl = useCallback((nextUrl: string | null) => { + const previousUrl = blobUrlRef.current + blobUrlRef.current = nextUrl + setBlobUrl(nextUrl) + if (previousUrl && previousUrl !== nextUrl) URL.revokeObjectURL(previousUrl) + }, []) + useEffect(() => { return () => { - textarea.removeEventListener('click', updateActiveLineNumber) - textarea.removeEventListener('keyup', updateActiveLineNumber) - textarea.removeEventListener('focus', updateActiveLineNumber) + if (blobUrlRef.current) { + URL.revokeObjectURL(blobUrlRef.current) + blobUrlRef.current = null + } } - }, [shouldUseCodeRenderer]) + }, []) - const calculateVisualLinesRef = useRef(() => {}) - calculateVisualLinesRef.current = () => { - const preElement = codeEditorRef.current?.querySelector('pre') - if (!(preElement instanceof HTMLElement)) return + return { fileData, isLoading, error, blobUrl, replaceBlobUrl } +} - const lines = renderedContentRef.current.split('\n') - const newVisualLineHeights: 
number[] = [] +const AudioPreview = memo(function AudioPreview({ + file, + workspaceId, +}: { + file: WorkspaceFileRecord + workspaceId: string +}) { + const { + fileData, + isLoading, + error: fetchError, + blobUrl, + replaceBlobUrl, + } = useBlobUrl(workspaceId, file.id, file.key) - const tempContainer = document.createElement('div') - tempContainer.style.cssText = ` - position: absolute; - visibility: hidden; - height: auto; - width: ${preElement.clientWidth}px; - font-family: ${window.getComputedStyle(preElement).fontFamily}; - font-size: ${window.getComputedStyle(preElement).fontSize}; - line-height: ${CODE_EDITOR_LINE_HEIGHT_PX}px; - padding: 8px; - white-space: pre-wrap; - word-break: break-word; - box-sizing: border-box; - ` - document.body.appendChild(tempContainer) + useEffect(() => { + if (!fileData) return + replaceBlobUrl(URL.createObjectURL(new Blob([fileData], { type: file.type || 'audio/mpeg' }))) + }, [file.type, fileData, replaceBlobUrl]) - lines.forEach((line) => { - const lineDiv = document.createElement('div') - lineDiv.textContent = line || ' ' - tempContainer.appendChild(lineDiv) - const actualHeight = lineDiv.getBoundingClientRect().height - const lineUnits = Math.max(1, Math.ceil(actualHeight / CODE_EDITOR_LINE_HEIGHT_PX)) - newVisualLineHeights.push(lineUnits) - tempContainer.removeChild(lineDiv) - }) + const error = blobUrl !== null ? null : resolvePreviewError(fetchError, null) + if (error) return - document.body.removeChild(tempContainer) - setVisualLineHeights((currentVisualLineHeights) => - areNumberArraysEqual(currentVisualLineHeights, newVisualLineHeights) - ? 
currentVisualLineHeights - : newVisualLineHeights - ) - } - - useEffect(() => { - if (!shouldUseCodeRenderer || !codeEditorRef.current) return - - const resizeObserver = new ResizeObserver(() => calculateVisualLinesRef.current()) - resizeObserver.observe(codeEditorRef.current) - - return () => { - resizeObserver.disconnect() - } - }, [shouldUseCodeRenderer]) - - useEffect(() => { - if (!shouldUseCodeRenderer) return - calculateVisualLinesRef.current() - }, [renderedContent, shouldUseCodeRenderer]) - - const renderCodeLineNumbers = useCallback((): ReactElement[] => { - const numbers: ReactElement[] = [] - let lineNumber = 1 - - visualLineHeights.forEach((height) => { - const isActive = lineNumber === activeLineNumber - numbers.push( -
- {lineNumber} -
- ) - - for (let i = 1; i < height; i++) { - numbers.push( -
- {lineNumber} -
- ) - } - - lineNumber++ - }) - - if (numbers.length === 0) { - numbers.push( -
- 1 -
- ) - } - - return numbers - }, [activeLineNumber, visualLineHeights]) - - useEffect(() => { - if (!isStreaming) return - if (disableStreamingAutoScroll) { - textareaStuckRef.current = false - return - } - textareaStuckRef.current = true - - const el = (shouldUseCodeRenderer ? codeScrollRef.current : textareaRef.current) ?? null - if (!el) return - - const onWheel = (e: Event) => { - if ((e as WheelEvent).deltaY < 0) textareaStuckRef.current = false - } - - const onScroll = () => { - const dist = el.scrollHeight - el.scrollTop - el.clientHeight - if (dist <= 5) textareaStuckRef.current = true - } - - el.addEventListener('wheel', onWheel, { passive: true }) - el.addEventListener('scroll', onScroll, { passive: true }) - - return () => { - el.removeEventListener('wheel', onWheel) - el.removeEventListener('scroll', onScroll) - } - }, [disableStreamingAutoScroll, isStreaming, shouldUseCodeRenderer]) - - useEffect(() => { - if (!isStreaming || !textareaStuckRef.current || disableStreamingAutoScroll) return - const el = (shouldUseCodeRenderer ? codeScrollRef.current : textareaRef.current) ?? null - if (!el) return - el.scrollTop = el.scrollHeight - }, [disableStreamingAutoScroll, isStreaming, renderedContent, shouldUseCodeRenderer]) - - const previewType = resolvePreviewType(file.type, file.name) - const isIframeRendered = previewType === 'html' || previewType === 'svg' - const effectiveMode = isStreaming && isIframeRendered ? 'editor' : previewMode - const showEditor = effectiveMode !== 'preview' - const showPreviewPane = effectiveMode !== 'editor' - - if (streamingContent === undefined) { - if (isLoading) return DOCUMENT_SKELETON - - if (error && !isInitialized) { - return ( -
-

Failed to load file content

-
- ) - } - } - - return ( -
- {showEditor && - (shouldUseCodeRenderer ? ( -
-
- - - {renderCodeLineNumbers()} - - - - - -
-
- ) : ( -
editConfig && startEdit(-1, i, String(header ?? ''))} > - {String(header ?? '')} + {isEditing(-1, i) ? ( + setEditValue(e.target.value)} + onBlur={commitEdit} + onKeyDown={handleKeyDown} + className='w-full min-w-[60px] bg-transparent font-semibold text-[12px] text-[var(--text-primary)] outline-none ring-1 ring-[var(--brand-secondary)] ring-inset' + /> + ) : ( + String(header ?? '') + )}
- {String(row[ci] ?? '')} + editConfig && startEdit(ri, ci, String(row[ci] ?? ''))} + > + {isEditing(ri, ci) ? ( + setEditValue(e.target.value)} + onBlur={commitEdit} + onKeyDown={handleKeyDown} + className='w-full min-w-[60px] bg-transparent text-[13px] text-[var(--text-secondary)] outline-none ring-1 ring-[var(--brand-secondary)] ring-inset' + /> + ) : ( + String(row[ci] ?? '') + )}