Compare commits
27 Commits
feat/serve...pr/JoelJac
| Author | SHA1 | Date |
|---|---|---|
| | 7876c5cc58 | |
| | e09fb32219 | |
| | b5cf2fa1b4 | |
| | 0e2dd400e4 | |
| | f99bfda88b | |
| | 446d29f519 | |
| | d0032d6f50 | |
| | fbf5d5ba71 | |
| | 14506cd92a | |
| | f38e2b6884 | |
| | ac11a804bd | |
| | 25d8c28e4f | |
| | ae9e32b427 | |
| | 041eeab4d2 | |
| | 6d1e8c4eeb | |
| | 08992089f7 | |
| | cc45ff595b | |
| | ba58e25278 | |
| | e261f75cc8 | |
| | 55293a0382 | |
| | f7deff5448 | |
| | 8877dae2dd | |
| | 162434fad4 | |
| | 5095c2b76f | |
| | 70c4ef5699 | |
| | 5959f422a0 | |
| | 5c59e55e53 | |
@@ -112,17 +112,17 @@ services:
    build:
      dockerfile: packages/hoppscotch-backend/Dockerfile
      context: .
      target: dev
      target: prod
    env_file:
      - ./.env
    restart: always
    environment:
      # Edit the below line to match your PostgresDB URL if you have an outside DB (make sure to update the .env file as well)
      - DATABASE_URL=postgresql://postgres:testpass@hoppscotch-db:5432/hoppscotch?connect_timeout=300
      # - DATABASE_URL=postgresql://postgres:testpass@hoppscotch-db:5432/hoppscotch?connect_timeout=300
      - PORT=3000
    volumes:
      # Uncomment the line below when modifying code. Only applicable when using the "dev" target.
      - ./packages/hoppscotch-backend/:/usr/src/app
      # - ./packages/hoppscotch-backend/:/usr/src/app
      - /usr/src/app/node_modules/
    depends_on:
      hoppscotch-db:
@@ -1,17 +0,0 @@
-- AlterTable
ALTER TABLE
  "TeamCollection"
ADD
  titleSearch tsvector GENERATED ALWAYS AS (to_tsvector('english', title)) STORED;

-- AlterTable
ALTER TABLE
  "TeamRequest"
ADD
  titleSearch tsvector GENERATED ALWAYS AS (to_tsvector('english', title)) STORED;

-- CreateIndex
CREATE INDEX "TeamCollection_textSearch_idx" ON "TeamCollection" USING GIN (titleSearch);

-- CreateIndex
CREATE INDEX "TeamRequest_textSearch_idx" ON "TeamRequest" USING GIN (titleSearch);
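Note: the migration above defines `titleSearch` as a generated `tsvector` column backed by a GIN index, which is what the raw search queries later in this compare rely on. A minimal, hedged sketch of querying such a column through Prisma — table and column names follow the hunks in this compare; everything else is illustrative only:

```ts
import { PrismaClient, Prisma } from '@prisma/client';

const prisma = new PrismaClient();

// Hedged sketch: rank TeamCollection rows against a text query using the
// generated `titlesearch` column and the GIN index created by the migration.
async function searchTeamCollections(teamID: string, searchQuery: string) {
  return prisma.$queryRaw<{ id: string; title: string }[]>(Prisma.sql`
    SELECT id, title
    FROM "TeamCollection"
    WHERE "teamID" = ${teamID}
      AND titlesearch @@ to_tsquery(${searchQuery})
    ORDER BY ts_rank(titlesearch, to_tsquery(${searchQuery})) DESC
    LIMIT 10;
  `);
}
```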
@@ -41,31 +41,31 @@ model TeamInvitation {
}

model TeamCollection {
  id String @id @default(cuid())
  id String @id @default(cuid())
  parentID String?
  data Json?
  parent TeamCollection? @relation("TeamCollectionChildParent", fields: [parentID], references: [id])
  children TeamCollection[] @relation("TeamCollectionChildParent")
  parent TeamCollection? @relation("TeamCollectionChildParent", fields: [parentID], references: [id])
  children TeamCollection[] @relation("TeamCollectionChildParent")
  requests TeamRequest[]
  teamID String
  team Team @relation(fields: [teamID], references: [id], onDelete: Cascade)
  team Team @relation(fields: [teamID], references: [id], onDelete: Cascade)
  title String
  orderIndex Int
  createdOn DateTime @default(now()) @db.Timestamp(3)
  updatedOn DateTime @updatedAt @db.Timestamp(3)
  createdOn DateTime @default(now()) @db.Timestamp(3)
  updatedOn DateTime @updatedAt @db.Timestamp(3)
}

model TeamRequest {
  id String @id @default(cuid())
  id String @id @default(cuid())
  collectionID String
  collection TeamCollection @relation(fields: [collectionID], references: [id], onDelete: Cascade)
  collection TeamCollection @relation(fields: [collectionID], references: [id], onDelete: Cascade)
  teamID String
  team Team @relation(fields: [teamID], references: [id], onDelete: Cascade)
  team Team @relation(fields: [teamID], references: [id], onDelete: Cascade)
  title String
  request Json
  orderIndex Int
  createdOn DateTime @default(now()) @db.Timestamp(3)
  updatedOn DateTime @updatedAt @db.Timestamp(3)
  createdOn DateTime @default(now()) @db.Timestamp(3)
  updatedOn DateTime @updatedAt @db.Timestamp(3)
}

model Shortcode {
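Note: `TeamCollection` models a folder tree through the `"TeamCollectionChildParent"` self-relation (`parent`/`children`), which the recursive parent-tree queries further down walk over. A hedged sketch of creating a nested collection with the generated Prisma client — identifiers follow the schema above, the surrounding calling code is assumed:

```ts
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Create a child collection under an existing parent by wiring the
// "TeamCollectionChildParent" self-relation via the parent connect.
async function createChildCollection(teamID: string, parentID: string, title: string) {
  return prisma.teamCollection.create({
    data: {
      title,
      orderIndex: 1, // placeholder value for the sketch
      team: { connect: { id: teamID } },
      parent: { connect: { id: parentID } },
    },
  });
}
```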
@@ -32,7 +32,7 @@ import {
  EnableAndDisableSSOArgs,
  InfraConfigArgs,
} from 'src/infra-config/input-args';
import { InfraConfigEnum } from 'src/types/InfraConfig';
import { InfraConfigEnumForClient } from 'src/types/InfraConfig';
import { ServiceStatus } from 'src/infra-config/helper';

@UseGuards(GqlThrottlerGuard)

@@ -274,10 +274,10 @@ export class InfraResolver {
  async infraConfigs(
    @Args({
      name: 'configNames',
      type: () => [InfraConfigEnum],
      type: () => [InfraConfigEnumForClient],
      description: 'Configs to fetch',
    })
    names: InfraConfigEnum[],
    names: InfraConfigEnumForClient[],
  ) {
    const infraConfigs = await this.infraConfigService.getMany(names);
    if (E.isLeft(infraConfigs)) throwErr(infraConfigs.left);
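Note: one side of this hunk types the `configNames` argument as `[InfraConfigEnum]` and the other as `[InfraConfigEnumForClient]`; either way the enum is registered under the GraphQL name `InfraConfigEnum` (see the `registerEnumType` change in the model hunk below). A hedged sketch of what a client-side call to this resolver could look like — the operation shape and nullability are assumptions, not taken from the generated schema:

```ts
// Illustrative GraphQL document for the `infraConfigs` query shown above.
const INFRA_CONFIGS_QUERY = /* GraphQL */ `
  query InfraConfigs($configNames: [InfraConfigEnum!]!) {
    infraConfigs(configNames: $configNames) {
      name
      value
    }
  }
`;

const variables = {
  configNames: ['GOOGLE_CLIENT_ID', 'MAILER_ADDRESS_FROM'],
};
```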
@@ -18,7 +18,12 @@ import { JwtAuthGuard } from './guards/jwt-auth.guard';
import { GqlUser } from 'src/decorators/gql-user.decorator';
import { AuthUser } from 'src/types/AuthUser';
import { RTCookie } from 'src/decorators/rt-cookie.decorator';
import { AuthProvider, authCookieHandler, authProviderCheck } from './helper';
import {
  AuthProvider,
  authCookieHandler,
  authProviderCheck,
  throwHTTPErr,
} from './helper';
import { GoogleSSOGuard } from './guards/google-sso.guard';
import { GithubSSOGuard } from './guards/github-sso.guard';
import { MicrosoftSSOGuard } from './guards/microsoft-sso-.guard';

@@ -26,7 +31,6 @@ import { ThrottlerBehindProxyGuard } from 'src/guards/throttler-behind-proxy.gua
import { SkipThrottle } from '@nestjs/throttler';
import { AUTH_PROVIDER_NOT_SPECIFIED } from 'src/errors';
import { ConfigService } from '@nestjs/config';
import { throwHTTPErr } from 'src/utils';

@UseGuards(ThrottlerBehindProxyGuard)
@Controller({ path: 'auth', version: '1' })

@@ -12,10 +12,7 @@ import { GithubStrategy } from './strategies/github.strategy';
import { MicrosoftStrategy } from './strategies/microsoft.strategy';
import { AuthProvider, authProviderCheck } from './helper';
import { ConfigModule, ConfigService } from '@nestjs/config';
import {
  isInfraConfigTablePopulated,
  loadInfraConfiguration,
} from 'src/infra-config/helper';
import { loadInfraConfiguration } from 'src/infra-config/helper';
import { InfraConfigModule } from 'src/infra-config/infra-config.module';

@Module({

@@ -37,11 +34,6 @@ import { InfraConfigModule } from 'src/infra-config/infra-config.module';
})
export class AuthModule {
  static async register() {
    const isInfraConfigPopulated = await isInfraConfigTablePopulated();
    if (!isInfraConfigPopulated) {
      return { module: AuthModule };
    }

    const env = await loadInfraConfiguration();
    const allowedAuthProviders = env.INFRA.VITE_ALLOWED_AUTH_PROVIDERS;
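Note: `AuthModule.register()` is an async static factory for a dynamic module — it bails out to a bare `{ module: AuthModule }` while the `infra_config` table is still unpopulated, and otherwise reads provider settings via `loadInfraConfiguration()`. A hedged sketch of how such a module is typically consumed; the `AppModule` wiring below is an assumption for illustration and is not part of this diff:

```ts
import { Module } from '@nestjs/common';
// Hypothetical import path — the module itself is the one shown in this diff.
import { AuthModule } from './auth/auth.module';

@Module({
  imports: [
    // NestJS accepts a Promise<DynamicModule> here, so the async register()
    // can decide its providers from the infra_config table at bootstrap.
    AuthModule.register(),
  ],
})
export class AppModule {}
```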
@@ -24,7 +24,7 @@ import {
  RefreshTokenPayload,
} from 'src/types/AuthTokens';
import { JwtService } from '@nestjs/jwt';
import { RESTError } from 'src/types/RESTError';
import { AuthError } from 'src/types/AuthError';
import { AuthUser, IsAdmin } from 'src/types/AuthUser';
import { VerificationToken } from '@prisma/client';
import { Origin } from './helper';

@@ -117,7 +117,7 @@ export class AuthService {
      userUid,
    );
    if (E.isLeft(updatedUser))
      return E.left(<RESTError>{
      return E.left(<AuthError>{
        message: updatedUser.left,
        statusCode: HttpStatus.NOT_FOUND,
      });

@@ -255,7 +255,7 @@ export class AuthService {
   */
  async verifyMagicLinkTokens(
    magicLinkIDTokens: VerifyMagicDto,
  ): Promise<E.Right<AuthTokens> | E.Left<RESTError>> {
  ): Promise<E.Right<AuthTokens> | E.Left<AuthError>> {
    const passwordlessTokens = await this.validatePasswordlessTokens(
      magicLinkIDTokens,
    );

@@ -373,7 +373,7 @@ export class AuthService {
    if (usersCount === 1) {
      const elevatedUser = await this.usersService.makeAdmin(user.uid);
      if (E.isLeft(elevatedUser))
        return E.left(<RESTError>{
        return E.left(<AuthError>{
          message: elevatedUser.left,
          statusCode: HttpStatus.NOT_FOUND,
        });
@@ -1,10 +1,9 @@
import { CanActivate, ExecutionContext, Injectable } from '@nestjs/common';
import { AuthGuard } from '@nestjs/passport';
import { AuthProvider, authProviderCheck } from '../helper';
import { AuthProvider, authProviderCheck, throwHTTPErr } from '../helper';
import { Observable } from 'rxjs';
import { AUTH_PROVIDER_NOT_SPECIFIED } from 'src/errors';
import { ConfigService } from '@nestjs/config';
import { throwHTTPErr } from 'src/utils';

@Injectable()
export class GithubSSOGuard extends AuthGuard('github') implements CanActivate {

@@ -1,10 +1,9 @@
import { CanActivate, ExecutionContext, Injectable } from '@nestjs/common';
import { AuthGuard } from '@nestjs/passport';
import { AuthProvider, authProviderCheck } from '../helper';
import { AuthProvider, authProviderCheck, throwHTTPErr } from '../helper';
import { Observable } from 'rxjs';
import { AUTH_PROVIDER_NOT_SPECIFIED } from 'src/errors';
import { ConfigService } from '@nestjs/config';
import { throwHTTPErr } from 'src/utils';

@Injectable()
export class GoogleSSOGuard extends AuthGuard('google') implements CanActivate {

@@ -1,10 +1,9 @@
import { CanActivate, ExecutionContext, Injectable } from '@nestjs/common';
import { AuthGuard } from '@nestjs/passport';
import { AuthProvider, authProviderCheck } from '../helper';
import { AuthProvider, authProviderCheck, throwHTTPErr } from '../helper';
import { Observable } from 'rxjs';
import { AUTH_PROVIDER_NOT_SPECIFIED } from 'src/errors';
import { ConfigService } from '@nestjs/config';
import { throwHTTPErr } from 'src/utils';

@Injectable()
export class MicrosoftSSOGuard
@@ -1,5 +1,6 @@
import { HttpException, HttpStatus } from '@nestjs/common';
import { DateTime } from 'luxon';
import { AuthError } from 'src/types/AuthError';
import { AuthTokens } from 'src/types/AuthTokens';
import { Response } from 'express';
import * as cookie from 'cookie';

@@ -24,6 +25,15 @@ export enum AuthProvider {
  EMAIL = 'EMAIL',
}

/**
 * This function allows throw to be used as an expression
 * @param errMessage Message present in the error message
 */
export function throwHTTPErr(errorData: AuthError): never {
  const { message, statusCode } = errorData;
  throw new HttpException(message, statusCode);
}

/**
 * Sets and returns the cookies in the response object on successful authentication
 * @param res Express Response Object
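Note: because `throwHTTPErr` is typed as returning `never`, it can be used where an expression is expected instead of a `throw` statement. A small illustrative sketch — the helper function and error message below are made up for the example:

```ts
import { HttpStatus } from '@nestjs/common';
import { throwHTTPErr } from './helper';

// `never` lets the call sit on the right-hand side of `??` or a ternary.
function requireHeader(value: string | undefined): string {
  return (
    value ??
    throwHTTPErr({ message: 'header missing', statusCode: HttpStatus.BAD_REQUEST })
  );
}
```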
@@ -17,8 +17,8 @@ export class GithubStrategy extends PassportStrategy(Strategy) {
    super({
      clientID: configService.get('INFRA.GITHUB_CLIENT_ID'),
      clientSecret: configService.get('INFRA.GITHUB_CLIENT_SECRET'),
      callbackURL: configService.get('INFRA.GITHUB_CALLBACK_URL'),
      scope: [configService.get('INFRA.GITHUB_SCOPE')],
      callbackURL: configService.get('GITHUB_CALLBACK_URL'),
      scope: [configService.get('GITHUB_SCOPE')],
      store: true,
    });
  }

@@ -17,8 +17,8 @@ export class GoogleStrategy extends PassportStrategy(Strategy) {
    super({
      clientID: configService.get('INFRA.GOOGLE_CLIENT_ID'),
      clientSecret: configService.get('INFRA.GOOGLE_CLIENT_SECRET'),
      callbackURL: configService.get('INFRA.GOOGLE_CALLBACK_URL'),
      scope: configService.get('INFRA.GOOGLE_SCOPE').split(','),
      callbackURL: configService.get('GOOGLE_CALLBACK_URL'),
      scope: configService.get('GOOGLE_SCOPE').split(','),
      passReqToCallback: true,
      store: true,
    });

@@ -17,9 +17,9 @@ export class MicrosoftStrategy extends PassportStrategy(Strategy) {
    super({
      clientID: configService.get('INFRA.MICROSOFT_CLIENT_ID'),
      clientSecret: configService.get('INFRA.MICROSOFT_CLIENT_SECRET'),
      callbackURL: configService.get('INFRA.MICROSOFT_CALLBACK_URL'),
      scope: [configService.get('INFRA.MICROSOFT_SCOPE')],
      tenant: configService.get('INFRA.MICROSOFT_TENANT'),
      callbackURL: configService.get('MICROSOFT_CALLBACK_URL'),
      scope: [configService.get('MICROSOFT_SCOPE')],
      tenant: configService.get('MICROSOFT_TENANT'),
      store: true,
    });
  }
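Note: the `INFRA.`-prefixed lookups in these strategies appear to read from a namespaced configuration object (the same `env.INFRA` shape used in the AuthModule hunk), while the plain keys resolve against process environment variables. A hedged sketch of that difference with NestJS `ConfigService`; the loader shape below is an assumption inferred from `env.INFRA.VITE_ALLOWED_AUTH_PROVIDERS` above, not a verified part of this codebase:

```ts
import { Module } from '@nestjs/common';
import { ConfigModule } from '@nestjs/config';

// Assumed loader shape: infra_config rows nested under an `INFRA` key.
const loadInfra = async () => ({
  INFRA: { GITHUB_CALLBACK_URL: 'https://example.test/cb' },
});

@Module({
  imports: [ConfigModule.forRoot({ load: [loadInfra] })],
})
class ExampleModule {}

// Inside a provider that injects ConfigService:
//   configService.get('INFRA.GITHUB_CALLBACK_URL') // nested, namespaced value
//   configService.get('GITHUB_CALLBACK_URL')       // falls back to process.env
```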
@@ -228,12 +228,6 @@ export const TEAM_COL_NOT_SAME_PARENT =
export const TEAM_COL_SAME_NEXT_COLL =
  'team_coll/collection_and_next_collection_are_same';

/**
 * Team Collection search failed
 * (TeamCollectionService)
 */
export const TEAM_COL_SEARCH_FAILED = 'team_coll/team_collection_search_failed';

/**
 * Team Collection Re-Ordering Failed
 * (TeamCollectionService)

@@ -289,13 +283,6 @@ export const TEAM_NOT_OWNER = 'team_coll/team_not_owner' as const;
export const TEAM_COLL_DATA_INVALID =
  'team_coll/team_coll_data_invalid' as const;

/**
 * Team Collection parent tree generation failed
 * (TeamCollectionService)
 */
export const TEAM_COLL_PARENT_TREE_GEN_FAILED =
  'team_coll/team_coll_parent_tree_generation_failed';

/**
 * Tried to perform an action on a request that doesn't accept their member role level
 * (GqlRequestTeamMemberGuard)

@@ -321,19 +308,6 @@ export const TEAM_REQ_INVALID_TARGET_COLL_ID =
 */
export const TEAM_REQ_REORDERING_FAILED = 'team_req/reordering_failed' as const;

/**
 * Team Request search failed
 * (TeamRequestService)
 */
export const TEAM_REQ_SEARCH_FAILED = 'team_req/team_request_search_failed';

/**
 * Team Request parent tree generation failed
 * (TeamRequestService)
 */
export const TEAM_REQ_PARENT_TREE_GEN_FAILED =
  'team_req/team_req_parent_tree_generation_failed';

/**
 * No Postmark Sender Email defined
 * (AuthService)

@@ -731,13 +705,6 @@ export const INFRA_CONFIG_INVALID_INPUT = 'infra_config/invalid_input' as const;
export const INFRA_CONFIG_SERVICE_NOT_CONFIGURED =
  'infra_config/service_not_configured' as const;

/**
 * Infra Config update/fetch operation not allowed
 * (InfraConfigService)
 */
export const INFRA_CONFIG_OPERATION_NOT_ALLOWED =
  'infra_config/operation_not_allowed';

/**
 * Error message for when the database table does not exist
 * (InfraConfigService)
@@ -1,8 +1,5 @@
import { AuthProvider } from 'src/auth/helper';
import {
  AUTH_PROVIDER_NOT_CONFIGURED,
  DATABASE_TABLE_NOT_EXIST,
} from 'src/errors';
import { AUTH_PROVIDER_NOT_CONFIGURED } from 'src/errors';
import { PrismaService } from 'src/prisma/prisma.service';
import { InfraConfigEnum } from 'src/types/InfraConfig';
import { throwErr } from 'src/utils';

@@ -17,21 +14,14 @@ const AuthProviderConfigurations = {
  [AuthProvider.GOOGLE]: [
    InfraConfigEnum.GOOGLE_CLIENT_ID,
    InfraConfigEnum.GOOGLE_CLIENT_SECRET,
    InfraConfigEnum.GOOGLE_CALLBACK_URL,
    InfraConfigEnum.GOOGLE_SCOPE,
  ],
  [AuthProvider.GITHUB]: [
    InfraConfigEnum.GITHUB_CLIENT_ID,
    InfraConfigEnum.GITHUB_CLIENT_SECRET,
    InfraConfigEnum.GITHUB_CALLBACK_URL,
    InfraConfigEnum.GITHUB_SCOPE,
  ],
  [AuthProvider.MICROSOFT]: [
    InfraConfigEnum.MICROSOFT_CLIENT_ID,
    InfraConfigEnum.MICROSOFT_CLIENT_SECRET,
    InfraConfigEnum.MICROSOFT_CALLBACK_URL,
    InfraConfigEnum.MICROSOFT_SCOPE,
    InfraConfigEnum.MICROSOFT_TENANT,
  ],
  [AuthProvider.EMAIL]: [
    InfraConfigEnum.MAILER_SMTP_URL,

@@ -64,125 +54,6 @@ export async function loadInfraConfiguration() {
  }
}

/**
 * Read the default values from .env file and return them as an array
 * @returns Array of default infra configs
 */
export async function getDefaultInfraConfigs(): Promise<
  { name: InfraConfigEnum; value: string }[]
> {
  const prisma = new PrismaService();

  // Prepare rows for 'infra_config' table with default values (from .env) for each 'name'
  const infraConfigDefaultObjs: { name: InfraConfigEnum; value: string }[] = [
    {
      name: InfraConfigEnum.MAILER_SMTP_URL,
      value: process.env.MAILER_SMTP_URL,
    },
    {
      name: InfraConfigEnum.MAILER_ADDRESS_FROM,
      value: process.env.MAILER_ADDRESS_FROM,
    },
    {
      name: InfraConfigEnum.GOOGLE_CLIENT_ID,
      value: process.env.GOOGLE_CLIENT_ID,
    },
    {
      name: InfraConfigEnum.GOOGLE_CLIENT_SECRET,
      value: process.env.GOOGLE_CLIENT_SECRET,
    },
    {
      name: InfraConfigEnum.GOOGLE_CALLBACK_URL,
      value: process.env.GOOGLE_CALLBACK_URL,
    },
    {
      name: InfraConfigEnum.GOOGLE_SCOPE,
      value: process.env.GOOGLE_SCOPE,
    },
    {
      name: InfraConfigEnum.GITHUB_CLIENT_ID,
      value: process.env.GITHUB_CLIENT_ID,
    },
    {
      name: InfraConfigEnum.GITHUB_CLIENT_SECRET,
      value: process.env.GITHUB_CLIENT_SECRET,
    },
    {
      name: InfraConfigEnum.GITHUB_CALLBACK_URL,
      value: process.env.GITHUB_CALLBACK_URL,
    },
    {
      name: InfraConfigEnum.GITHUB_SCOPE,
      value: process.env.GITHUB_SCOPE,
    },
    {
      name: InfraConfigEnum.MICROSOFT_CLIENT_ID,
      value: process.env.MICROSOFT_CLIENT_ID,
    },
    {
      name: InfraConfigEnum.MICROSOFT_CLIENT_SECRET,
      value: process.env.MICROSOFT_CLIENT_SECRET,
    },
    {
      name: InfraConfigEnum.MICROSOFT_CALLBACK_URL,
      value: process.env.MICROSOFT_CALLBACK_URL,
    },
    {
      name: InfraConfigEnum.MICROSOFT_SCOPE,
      value: process.env.MICROSOFT_SCOPE,
    },
    {
      name: InfraConfigEnum.MICROSOFT_TENANT,
      value: process.env.MICROSOFT_TENANT,
    },
    {
      name: InfraConfigEnum.VITE_ALLOWED_AUTH_PROVIDERS,
      value: getConfiguredSSOProviders(),
    },
    {
      name: InfraConfigEnum.ALLOW_ANALYTICS_COLLECTION,
      value: false.toString(),
    },
    {
      name: InfraConfigEnum.ANALYTICS_USER_ID,
      value: generateAnalyticsUserId(),
    },
    {
      name: InfraConfigEnum.IS_FIRST_TIME_INFRA_SETUP,
      value: (await prisma.infraConfig.count()) === 0 ? 'true' : 'false',
    },
  ];

  return infraConfigDefaultObjs;
}

/**
 * Verify if 'infra_config' table is loaded with all entries
 * @returns boolean
 */
export async function isInfraConfigTablePopulated(): Promise<boolean> {
  const prisma = new PrismaService();
  try {
    const dbInfraConfigs = await prisma.infraConfig.findMany();
    const infraConfigDefaultObjs = await getDefaultInfraConfigs();

    const propsRemainingToInsert = infraConfigDefaultObjs.filter(
      (p) => !dbInfraConfigs.find((e) => e.name === p.name),
    );

    if (propsRemainingToInsert.length > 0) {
      console.log(
        'Infra Config table is not populated with all entries. Populating now...',
      );
      return false;
    }

    return true;
  } catch (error) {
    return false;
  }
}

/**
 * Stop the app after 5 seconds
 * (Docker will re-start the app)
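Note: `getDefaultInfraConfigs()` and `isInfraConfigTablePopulated()` exist to seed and verify the `infra_config` table from `.env` defaults; the actual insert logic lives in `initializeInfraConfigTable()`, which is only partially visible in this compare. The following is therefore a hedged sketch of the seeding idea rather than the real implementation:

```ts
import { PrismaService } from 'src/prisma/prisma.service';

// Hedged sketch: insert only the default rows that are not in the table yet.
async function seedMissingInfraConfigs(
  prisma: PrismaService,
  defaults: { name: string; value: string }[],
) {
  const existing = await prisma.infraConfig.findMany();
  const missing = defaults.filter(
    (d) => !existing.some((e) => e.name === d.name),
  );
  if (missing.length > 0) {
    await prisma.infraConfig.createMany({ data: missing });
  }
}
```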
@@ -4,9 +4,9 @@ import { InfraConfigService } from './infra-config.service';
import * as E from 'fp-ts/Either';
import { JwtAuthGuard } from 'src/auth/guards/jwt-auth.guard';
import { RESTAdminGuard } from 'src/admin/guards/rest-admin.guard';
import { RESTError } from 'src/types/RESTError';
import { InfraConfigEnum } from 'src/types/InfraConfig';
import { throwHTTPErr } from 'src/utils';
import { throwHTTPErr } from 'src/auth/helper';
import { AuthError } from 'src/types/AuthError';
import { InfraConfigEnumForClient } from 'src/types/InfraConfig';

@UseGuards(ThrottlerBehindProxyGuard)
@Controller({ path: 'site', version: '1' })

@@ -17,11 +17,11 @@ export class SiteController {
  @UseGuards(JwtAuthGuard, RESTAdminGuard)
  async fetchSetupInfo() {
    const status = await this.infraConfigService.get(
      InfraConfigEnum.IS_FIRST_TIME_INFRA_SETUP,
      InfraConfigEnumForClient.IS_FIRST_TIME_INFRA_SETUP,
    );

    if (E.isLeft(status))
      throwHTTPErr(<RESTError>{
      throwHTTPErr(<AuthError>{
        message: status.left,
        statusCode: HttpStatus.NOT_FOUND,
      });

@@ -32,13 +32,13 @@ export class SiteController {
  @UseGuards(JwtAuthGuard, RESTAdminGuard)
  async setSetupAsComplete() {
    const res = await this.infraConfigService.update(
      InfraConfigEnum.IS_FIRST_TIME_INFRA_SETUP,
      InfraConfigEnumForClient.IS_FIRST_TIME_INFRA_SETUP,
      false.toString(),
      false,
    );

    if (E.isLeft(res))
      throwHTTPErr(<RESTError>{
      throwHTTPErr(<AuthError>{
        message: res.left,
        statusCode: HttpStatus.FORBIDDEN,
      });
@@ -1,6 +1,6 @@
import { Field, ObjectType, registerEnumType } from '@nestjs/graphql';
import { AuthProvider } from 'src/auth/helper';
import { InfraConfigEnum } from 'src/types/InfraConfig';
import { InfraConfigEnumForClient } from 'src/types/InfraConfig';
import { ServiceStatus } from './helper';

@ObjectType()

@@ -8,7 +8,7 @@ export class InfraConfig {
  @Field({
    description: 'Infra Config Name',
  })
  name: InfraConfigEnum;
  name: InfraConfigEnumForClient;

  @Field({
    description: 'Infra Config Value',

@@ -16,7 +16,7 @@ export class InfraConfig {
  value: string;
}

registerEnumType(InfraConfigEnum, {
registerEnumType(InfraConfigEnumForClient, {
  name: 'InfraConfigEnum',
});
@@ -1,16 +1,13 @@
import { mockDeep, mockReset } from 'jest-mock-extended';
import { PrismaService } from 'src/prisma/prisma.service';
import { InfraConfigService } from './infra-config.service';
import { InfraConfigEnum } from 'src/types/InfraConfig';
import {
  INFRA_CONFIG_NOT_FOUND,
  INFRA_CONFIG_OPERATION_NOT_ALLOWED,
  INFRA_CONFIG_UPDATE_FAILED,
} from 'src/errors';
  InfraConfigEnum,
  InfraConfigEnumForClient,
} from 'src/types/InfraConfig';
import { INFRA_CONFIG_NOT_FOUND, INFRA_CONFIG_UPDATE_FAILED } from 'src/errors';
import { ConfigService } from '@nestjs/config';
import * as helper from './helper';
import { InfraConfig as dbInfraConfig } from '@prisma/client';
import { InfraConfig } from './infra-config.model';

const mockPrisma = mockDeep<PrismaService>();
const mockConfigService = mockDeep<ConfigService>();

@@ -22,82 +19,12 @@ const infraConfigService = new InfraConfigService(
  mockConfigService,
);

const INITIALIZED_DATE_CONST = new Date();
const dbInfraConfigs: dbInfraConfig[] = [
  {
    id: '3',
    name: InfraConfigEnum.GOOGLE_CLIENT_ID,
    value: 'abcdefghijkl',
    active: true,
    createdOn: INITIALIZED_DATE_CONST,
    updatedOn: INITIALIZED_DATE_CONST,
  },
  {
    id: '4',
    name: InfraConfigEnum.VITE_ALLOWED_AUTH_PROVIDERS,
    value: 'google',
    active: true,
    createdOn: INITIALIZED_DATE_CONST,
    updatedOn: INITIALIZED_DATE_CONST,
  },
];
const infraConfigs: InfraConfig[] = [
  {
    name: dbInfraConfigs[0].name as InfraConfigEnum,
    value: dbInfraConfigs[0].value,
  },
  {
    name: dbInfraConfigs[1].name as InfraConfigEnum,
    value: dbInfraConfigs[1].value,
  },
];

beforeEach(() => {
  mockReset(mockPrisma);
});

describe('InfraConfigService', () => {
  describe('update', () => {
    it('should update the infra config without backend server restart', async () => {
      const name = InfraConfigEnum.GOOGLE_CLIENT_ID;
      const value = 'true';

      mockPrisma.infraConfig.update.mockResolvedValueOnce({
        id: '',
        name,
        value,
        active: true,
        createdOn: new Date(),
        updatedOn: new Date(),
      });

      jest.spyOn(helper, 'stopApp').mockReturnValueOnce();
      const result = await infraConfigService.update(name, value);

      expect(helper.stopApp).not.toHaveBeenCalled();
      expect(result).toEqualRight({ name, value });
    });

    it('should update the infra config with backend server restart', async () => {
      const name = InfraConfigEnum.GOOGLE_CLIENT_ID;
      const value = 'true';

      mockPrisma.infraConfig.update.mockResolvedValueOnce({
        id: '',
        name,
        value,
        active: true,
        createdOn: new Date(),
        updatedOn: new Date(),
      });
      jest.spyOn(helper, 'stopApp').mockReturnValueOnce();

      const result = await infraConfigService.update(name, value, true);

      expect(helper.stopApp).toHaveBeenCalledTimes(1);
      expect(result).toEqualRight({ name, value });
    });

    it('should update the infra config', async () => {
      const name = InfraConfigEnum.GOOGLE_CLIENT_ID;
      const value = 'true';

@@ -144,7 +71,7 @@ describe('InfraConfigService', () => {

  describe('get', () => {
    it('should get the infra config', async () => {
      const name = InfraConfigEnum.GOOGLE_CLIENT_ID;
      const name = InfraConfigEnumForClient.GOOGLE_CLIENT_ID;
      const value = 'true';

      mockPrisma.infraConfig.findUniqueOrThrow.mockResolvedValueOnce({

@@ -160,7 +87,7 @@ describe('InfraConfigService', () => {
    });

    it('should pass correct params to prisma findUnique', async () => {
      const name = InfraConfigEnum.GOOGLE_CLIENT_ID;
      const name = InfraConfigEnumForClient.GOOGLE_CLIENT_ID;

      await infraConfigService.get(name);

@@ -171,7 +98,7 @@ describe('InfraConfigService', () => {
    });

    it('should throw an error if the infra config does not exist', async () => {
      const name = InfraConfigEnum.GOOGLE_CLIENT_ID;
      const name = InfraConfigEnumForClient.GOOGLE_CLIENT_ID;

      mockPrisma.infraConfig.findUniqueOrThrow.mockRejectedValueOnce('null');

@@ -179,45 +106,4 @@ describe('InfraConfigService', () => {
      expect(result).toEqualLeft(INFRA_CONFIG_NOT_FOUND);
    });
  });

  describe('getMany', () => {
    it('should throw error if any disallowed names are provided', async () => {
      const disallowedNames = [InfraConfigEnum.VITE_ALLOWED_AUTH_PROVIDERS];
      const result = await infraConfigService.getMany(disallowedNames);

      expect(result).toEqualLeft(INFRA_CONFIG_OPERATION_NOT_ALLOWED);
    });
    it('should resolve right with disallowed names if `checkDisallowed` parameter passed', async () => {
      const disallowedNames = [InfraConfigEnum.VITE_ALLOWED_AUTH_PROVIDERS];

      const dbInfraConfigResponses = dbInfraConfigs.filter((dbConfig) =>
        disallowedNames.includes(dbConfig.name as InfraConfigEnum),
      );
      mockPrisma.infraConfig.findMany.mockResolvedValueOnce(
        dbInfraConfigResponses,
      );

      const result = await infraConfigService.getMany(disallowedNames, false);

      expect(result).toEqualRight(
        infraConfigs.filter((i) => disallowedNames.includes(i.name)),
      );
    });

    it('should return right with infraConfigs if Prisma query succeeds', async () => {
      const allowedNames = [InfraConfigEnum.GOOGLE_CLIENT_ID];

      const dbInfraConfigResponses = dbInfraConfigs.filter((dbConfig) =>
        allowedNames.includes(dbConfig.name as InfraConfigEnum),
      );
      mockPrisma.infraConfig.findMany.mockResolvedValueOnce(
        dbInfraConfigResponses,
      );

      const result = await infraConfigService.getMany(allowedNames);
      expect(result).toEqualRight(
        infraConfigs.filter((i) => allowedNames.includes(i.name)),
      );
    });
  });
});
@@ -3,25 +3,28 @@ import { InfraConfig } from './infra-config.model';
import { PrismaService } from 'src/prisma/prisma.service';
import { InfraConfig as DBInfraConfig } from '@prisma/client';
import * as E from 'fp-ts/Either';
import { InfraConfigEnum } from 'src/types/InfraConfig';
import {
  InfraConfigEnum,
  InfraConfigEnumForClient,
} from 'src/types/InfraConfig';
import {
  AUTH_PROVIDER_NOT_SPECIFIED,
  DATABASE_TABLE_NOT_EXIST,
  INFRA_CONFIG_INVALID_INPUT,
  INFRA_CONFIG_NOT_FOUND,
  INFRA_CONFIG_NOT_LISTED,
  INFRA_CONFIG_RESET_FAILED,
  INFRA_CONFIG_UPDATE_FAILED,
  INFRA_CONFIG_SERVICE_NOT_CONFIGURED,
  INFRA_CONFIG_OPERATION_NOT_ALLOWED,
} from 'src/errors';
import {
  throwErr,
  validateSMTPEmail,
  validateSMTPUrl,
  validateUrl,
} from 'src/utils';
import { throwErr, validateSMTPEmail, validateSMTPUrl } from 'src/utils';
import { ConfigService } from '@nestjs/config';
import { ServiceStatus, getDefaultInfraConfigs, stopApp } from './helper';
import {
  ServiceStatus,
  generateAnalyticsUserId,
  getConfiguredSSOProviders,
  stopApp,
} from './helper';
import { EnableAndDisableSSOArgs, InfraConfigArgs } from './input-args';
import { AuthProvider } from 'src/auth/helper';

@@ -32,32 +35,84 @@ export class InfraConfigService implements OnModuleInit {
    private readonly configService: ConfigService,
  ) {}

  // Following fields are not updatable by `infraConfigs` Mutation. Use dedicated mutations for these fields instead.
  EXCLUDE_FROM_UPDATE_CONFIGS = [
    InfraConfigEnum.VITE_ALLOWED_AUTH_PROVIDERS,
    InfraConfigEnum.ALLOW_ANALYTICS_COLLECTION,
    InfraConfigEnum.ANALYTICS_USER_ID,
    InfraConfigEnum.IS_FIRST_TIME_INFRA_SETUP,
  ];
  // Following fields can not be fetched by `infraConfigs` Query. Use dedicated queries for these fields instead.
  EXCLUDE_FROM_FETCH_CONFIGS = [
    InfraConfigEnum.VITE_ALLOWED_AUTH_PROVIDERS,
    InfraConfigEnum.ANALYTICS_USER_ID,
    InfraConfigEnum.IS_FIRST_TIME_INFRA_SETUP,
  ];

  async onModuleInit() {
    await this.initializeInfraConfigTable();
  }

  async getDefaultInfraConfigs(): Promise<
    { name: InfraConfigEnum; value: string }[]
  > {
    // Prepare rows for 'infra_config' table with default values (from .env) for each 'name'
    const infraConfigDefaultObjs: { name: InfraConfigEnum; value: string }[] = [
      {
        name: InfraConfigEnum.MAILER_SMTP_URL,
        value: process.env.MAILER_SMTP_URL,
      },
      {
        name: InfraConfigEnum.MAILER_ADDRESS_FROM,
        value: process.env.MAILER_ADDRESS_FROM,
      },
      {
        name: InfraConfigEnum.GOOGLE_CLIENT_ID,
        value: process.env.GOOGLE_CLIENT_ID,
      },
      {
        name: InfraConfigEnum.GOOGLE_CLIENT_SECRET,
        value: process.env.GOOGLE_CLIENT_SECRET,
      },
      {
        name: InfraConfigEnum.GITHUB_CLIENT_ID,
        value: process.env.GITHUB_CLIENT_ID,
      },
      {
        name: InfraConfigEnum.GITHUB_CLIENT_SECRET,
        value: process.env.GITHUB_CLIENT_SECRET,
      },
      {
        name: InfraConfigEnum.MICROSOFT_CLIENT_ID,
        value: process.env.MICROSOFT_CLIENT_ID,
      },
      {
        name: InfraConfigEnum.MICROSOFT_CLIENT_SECRET,
        value: process.env.MICROSOFT_CLIENT_SECRET,
      },
      {
        name: InfraConfigEnum.VITE_ALLOWED_AUTH_PROVIDERS,
        value: getConfiguredSSOProviders(),
      },
      {
        name: InfraConfigEnum.ALLOW_ANALYTICS_COLLECTION,
        value: false.toString(),
      },
      {
        name: InfraConfigEnum.ANALYTICS_USER_ID,
        value: generateAnalyticsUserId(),
      },
      {
        name: InfraConfigEnum.IS_FIRST_TIME_INFRA_SETUP,
        value: (await this.prisma.infraConfig.count()) === 0 ? 'true' : 'false',
      },
    ];

    return infraConfigDefaultObjs;
  }

  /**
   * Initialize the 'infra_config' table with values from .env
   * @description This function create rows 'infra_config' in very first time (only once)
   */
  async initializeInfraConfigTable() {
    try {
      // Get all the 'names' of the properties to be saved in the 'infra_config' table
      const enumValues = Object.values(InfraConfigEnum);

      // Fetch the default values (value in .env) for configs to be saved in 'infra_config' table
      const infraConfigDefaultObjs = await getDefaultInfraConfigs();
      const infraConfigDefaultObjs = await this.getDefaultInfraConfigs();

      // Check if all the 'names' are listed in the default values
      if (enumValues.length !== infraConfigDefaultObjs.length) {
        throw new Error(INFRA_CONFIG_NOT_LISTED);
      }

      // Eliminate the rows (from 'infraConfigDefaultObjs') that are already present in the database table
      const dbInfraConfigs = await this.prisma.infraConfig.findMany();

@@ -114,7 +169,11 @@ export class InfraConfigService implements OnModuleInit {
   * @param restartEnabled If true, restart the app after updating the InfraConfig
   * @returns InfraConfig model
   */
  async update(name: InfraConfigEnum, value: string, restartEnabled = false) {
  async update(
    name: InfraConfigEnumForClient | InfraConfigEnum,
    value: string,
    restartEnabled = false,
  ) {
    const isValidate = this.validateEnvValues([{ name, value }]);
    if (E.isLeft(isValidate)) return E.left(isValidate.left);

@@ -138,11 +197,6 @@ export class InfraConfigService implements OnModuleInit {
   * @returns InfraConfig model
   */
  async updateMany(infraConfigs: InfraConfigArgs[]) {
    for (let i = 0; i < infraConfigs.length; i++) {
      if (this.EXCLUDE_FROM_UPDATE_CONFIGS.includes(infraConfigs[i].name))
        return E.left(INFRA_CONFIG_OPERATION_NOT_ALLOWED);
    }

    const isValidate = this.validateEnvValues(infraConfigs);
    if (E.isLeft(isValidate)) return E.left(isValidate.left);

@@ -176,26 +230,12 @@ export class InfraConfigService implements OnModuleInit {
  ) {
    switch (service) {
      case AuthProvider.GOOGLE:
        return (
          configMap.GOOGLE_CLIENT_ID &&
          configMap.GOOGLE_CLIENT_SECRET &&
          configMap.GOOGLE_CALLBACK_URL &&
          configMap.GOOGLE_SCOPE
        );
        return configMap.GOOGLE_CLIENT_ID && configMap.GOOGLE_CLIENT_SECRET;
      case AuthProvider.GITHUB:
        return (
          configMap.GITHUB_CLIENT_ID &&
          configMap.GITHUB_CLIENT_SECRET &&
          configMap.GITHUB_CALLBACK_URL &&
          configMap.GITHUB_SCOPE
        );
        return configMap.GITHUB_CLIENT_ID && configMap.GITHUB_CLIENT_SECRET;
      case AuthProvider.MICROSOFT:
        return (
          configMap.MICROSOFT_CLIENT_ID &&
          configMap.MICROSOFT_CLIENT_SECRET &&
          configMap.MICROSOFT_CALLBACK_URL &&
          configMap.MICROSOFT_SCOPE &&
          configMap.MICROSOFT_TENANT
          configMap.MICROSOFT_CLIENT_ID && configMap.MICROSOFT_CLIENT_SECRET
        );
      case AuthProvider.EMAIL:
        return configMap.MAILER_SMTP_URL && configMap.MAILER_ADDRESS_FROM;

@@ -270,7 +310,7 @@ export class InfraConfigService implements OnModuleInit {
   * @param name Name of the InfraConfig
   * @returns InfraConfig model
   */
  async get(name: InfraConfigEnum) {
  async get(name: InfraConfigEnumForClient) {
    try {
      const infraConfig = await this.prisma.infraConfig.findUniqueOrThrow({
        where: { name },

@@ -287,15 +327,7 @@ export class InfraConfigService implements OnModuleInit {
   * @param names Names of the InfraConfigs
   * @returns InfraConfig model
   */
  async getMany(names: InfraConfigEnum[], checkDisallowedKeys: boolean = true) {
    if (checkDisallowedKeys) {
      // Check if the names are allowed to fetch by client
      for (let i = 0; i < names.length; i++) {
        if (this.EXCLUDE_FROM_FETCH_CONFIGS.includes(names[i]))
          return E.left(INFRA_CONFIG_OPERATION_NOT_ALLOWED);
      }
    }

  async getMany(names: InfraConfigEnumForClient[]) {
    try {
      const infraConfigs = await this.prisma.infraConfig.findMany({
        where: { name: { in: names } },
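Note: the two `getMany` signatures in this hunk differ in how disallowed keys are policed — one takes `names: InfraConfigEnum[]` plus a `checkDisallowedKeys` flag backed by `EXCLUDE_FROM_FETCH_CONFIGS`, the other narrows the parameter to `InfraConfigEnumForClient[]` so disallowed keys cannot be expressed at all. A hedged sketch of calling the flag-based variant; the service instance and absolute import path are assumed:

```ts
import * as E from 'fp-ts/Either';
import { InfraConfigEnum } from 'src/types/InfraConfig';
import { InfraConfigService } from 'src/infra-config/infra-config.service';

// Hedged sketch around getMany(names, checkDisallowedKeys = true).
async function demo(infraConfigService: InfraConfigService) {
  // Default guard: disallowed names resolve to a Left.
  const guarded = await infraConfigService.getMany([
    InfraConfigEnum.VITE_ALLOWED_AUTH_PROVIDERS,
  ]);
  console.log(E.isLeft(guarded)); // true -> INFRA_CONFIG_OPERATION_NOT_ALLOWED

  // Trusted internal callers can opt out of the check explicitly.
  const internal = await infraConfigService.getMany(
    [InfraConfigEnum.VITE_ALLOWED_AUTH_PROVIDERS],
    false, // checkDisallowedKeys
  );
  console.log(E.isRight(internal));
}
```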
@@ -322,7 +354,7 @@ export class InfraConfigService implements OnModuleInit {
   */
  async reset() {
    try {
      const infraConfigDefaultObjs = await getDefaultInfraConfigs();
      const infraConfigDefaultObjs = await this.getDefaultInfraConfigs();

      await this.prisma.infraConfig.deleteMany({
        where: { name: { in: infraConfigDefaultObjs.map((p) => p.name) } },

@@ -355,60 +387,36 @@ export class InfraConfigService implements OnModuleInit {
   */
  validateEnvValues(
    infraConfigs: {
      name: InfraConfigEnum;
      name: InfraConfigEnumForClient | InfraConfigEnum;
      value: string;
    }[],
  ) {
    for (let i = 0; i < infraConfigs.length; i++) {
      switch (infraConfigs[i].name) {
        case InfraConfigEnum.MAILER_SMTP_URL:
        case InfraConfigEnumForClient.MAILER_SMTP_URL:
          const isValidUrl = validateSMTPUrl(infraConfigs[i].value);
          if (!isValidUrl) return E.left(INFRA_CONFIG_INVALID_INPUT);
          break;
        case InfraConfigEnum.MAILER_ADDRESS_FROM:
        case InfraConfigEnumForClient.MAILER_ADDRESS_FROM:
          const isValidEmail = validateSMTPEmail(infraConfigs[i].value);
          if (!isValidEmail) return E.left(INFRA_CONFIG_INVALID_INPUT);
          break;
        case InfraConfigEnum.GOOGLE_CLIENT_ID:
        case InfraConfigEnumForClient.GOOGLE_CLIENT_ID:
          if (!infraConfigs[i].value) return E.left(INFRA_CONFIG_INVALID_INPUT);
          break;
        case InfraConfigEnum.GOOGLE_CLIENT_SECRET:
        case InfraConfigEnumForClient.GOOGLE_CLIENT_SECRET:
          if (!infraConfigs[i].value) return E.left(INFRA_CONFIG_INVALID_INPUT);
          break;
        case InfraConfigEnum.GOOGLE_CALLBACK_URL:
          if (!validateUrl(infraConfigs[i].value))
            return E.left(INFRA_CONFIG_INVALID_INPUT);
          break;
        case InfraConfigEnum.GOOGLE_SCOPE:
        case InfraConfigEnumForClient.GITHUB_CLIENT_ID:
          if (!infraConfigs[i].value) return E.left(INFRA_CONFIG_INVALID_INPUT);
          break;
        case InfraConfigEnum.GITHUB_CLIENT_ID:
        case InfraConfigEnumForClient.GITHUB_CLIENT_SECRET:
          if (!infraConfigs[i].value) return E.left(INFRA_CONFIG_INVALID_INPUT);
          break;
        case InfraConfigEnum.GITHUB_CLIENT_SECRET:
        case InfraConfigEnumForClient.MICROSOFT_CLIENT_ID:
          if (!infraConfigs[i].value) return E.left(INFRA_CONFIG_INVALID_INPUT);
          break;
        case InfraConfigEnum.GITHUB_CALLBACK_URL:
          if (!validateUrl(infraConfigs[i].value))
            return E.left(INFRA_CONFIG_INVALID_INPUT);
          break;
        case InfraConfigEnum.GITHUB_SCOPE:
          if (!infraConfigs[i].value) return E.left(INFRA_CONFIG_INVALID_INPUT);
          break;
        case InfraConfigEnum.MICROSOFT_CLIENT_ID:
          if (!infraConfigs[i].value) return E.left(INFRA_CONFIG_INVALID_INPUT);
          break;
        case InfraConfigEnum.MICROSOFT_CLIENT_SECRET:
          if (!infraConfigs[i].value) return E.left(INFRA_CONFIG_INVALID_INPUT);
          break;
        case InfraConfigEnum.MICROSOFT_CALLBACK_URL:
          if (!validateUrl(infraConfigs[i].value))
            return E.left(INFRA_CONFIG_INVALID_INPUT);
          break;
        case InfraConfigEnum.MICROSOFT_SCOPE:
          if (!infraConfigs[i].value) return E.left(INFRA_CONFIG_INVALID_INPUT);
          break;
        case InfraConfigEnum.MICROSOFT_TENANT:
        case InfraConfigEnumForClient.MICROSOFT_CLIENT_SECRET:
          if (!infraConfigs[i].value) return E.left(INFRA_CONFIG_INVALID_INPUT);
          break;
        default:
@@ -1,14 +1,14 @@
import { Field, InputType } from '@nestjs/graphql';
import { InfraConfigEnum } from 'src/types/InfraConfig';
import { InfraConfigEnumForClient } from 'src/types/InfraConfig';
import { ServiceStatus } from './helper';
import { AuthProvider } from 'src/auth/helper';

@InputType()
export class InfraConfigArgs {
  @Field(() => InfraConfigEnum, {
  @Field(() => InfraConfigEnumForClient, {
    description: 'Infra Config Name',
  })
  name: InfraConfigEnum;
  name: InfraConfigEnumForClient;

  @Field({
    description: 'Infra Config Value',

@@ -1,14 +0,0 @@
// Type of data returned from the query to obtain all search results
export type SearchQueryReturnType = {
  id: string;
  title: string;
  type: 'collection' | 'request';
  method?: string;
};

// Type of data returned from the query to obtain all parents
export type ParentTreeQueryReturnType = {
  id: string;
  parentID: string;
  title: string;
};
@@ -1,38 +0,0 @@
import { Controller, Get, Param, Query, UseGuards } from '@nestjs/common';
import { TeamCollectionService } from './team-collection.service';
import * as E from 'fp-ts/Either';
import { ThrottlerBehindProxyGuard } from 'src/guards/throttler-behind-proxy.guard';
import { JwtAuthGuard } from 'src/auth/guards/jwt-auth.guard';
import { RequiresTeamRole } from 'src/team/decorators/requires-team-role.decorator';
import { TeamMemberRole } from '@prisma/client';
import { RESTTeamMemberGuard } from 'src/team/guards/rest-team-member.guard';
import { throwHTTPErr } from 'src/utils';

@UseGuards(ThrottlerBehindProxyGuard)
@Controller({ path: 'team-collection', version: '1' })
export class TeamCollectionController {
  constructor(private readonly teamCollectionService: TeamCollectionService) {}

  @Get('search/:teamID/:searchQuery')
  @RequiresTeamRole(
    TeamMemberRole.VIEWER,
    TeamMemberRole.EDITOR,
    TeamMemberRole.OWNER,
  )
  @UseGuards(JwtAuthGuard, RESTTeamMemberGuard)
  async searchByTitle(
    @Param('searchQuery') searchQuery: string,
    @Param('teamID') teamID: string,
    @Query('take') take: string,
    @Query('skip') skip: string,
  ) {
    const res = await this.teamCollectionService.searchByTitle(
      searchQuery,
      teamID,
      parseInt(take),
      parseInt(skip),
    );
    if (E.isLeft(res)) throwHTTPErr(res.left);
    return res.right;
  }
}
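Note: the controller above exposes title search as a versioned REST route, roughly `GET /team-collection/search/:teamID/:searchQuery?take=&skip=`, guarded by JWT auth and team-role checks. A hedged sketch of calling it from a client — the host name, version prefix, and cookie handling are assumptions for illustration:

```ts
// Illustrative request against the search endpoint declared above.
async function searchTeamCollections(teamID: string, query: string) {
  const url =
    `https://backend.example.com/v1/team-collection/search/${teamID}/` +
    `${encodeURIComponent(query)}?take=10&skip=0`;
  const res = await fetch(url, {
    credentials: 'include', // the route expects the auth cookies set by JwtAuthGuard
  });
  if (!res.ok) throw new Error(`search failed: ${res.status}`);
  return res.json(); // shaped as { data: CollectionSearchNode[] } by the service below
}
```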
@@ -6,7 +6,6 @@ import { GqlCollectionTeamMemberGuard } from './guards/gql-collection-team-membe
import { TeamModule } from '../team/team.module';
import { UserModule } from '../user/user.module';
import { PubSubModule } from '../pubsub/pubsub.module';
import { TeamCollectionController } from './team-collection.controller';

@Module({
  imports: [PrismaModule, TeamModule, UserModule, PubSubModule],

@@ -16,6 +15,5 @@ import { TeamCollectionController } from './team-collection.controller';
    GqlCollectionTeamMemberGuard,
  ],
  exports: [TeamCollectionService, GqlCollectionTeamMemberGuard],
  controllers: [TeamCollectionController],
})
export class TeamCollectionModule {}
@@ -1,4 +1,4 @@
import { HttpStatus, Injectable } from '@nestjs/common';
import { Injectable } from '@nestjs/common';
import { PrismaService } from '../prisma/prisma.service';
import { TeamCollection } from './team-collection.model';
import {

@@ -14,10 +14,6 @@ import {
  TEAM_COL_SAME_NEXT_COLL,
  TEAM_COL_REORDERING_FAILED,
  TEAM_COLL_DATA_INVALID,
  TEAM_REQ_SEARCH_FAILED,
  TEAM_COL_SEARCH_FAILED,
  TEAM_REQ_PARENT_TREE_GEN_FAILED,
  TEAM_COLL_PARENT_TREE_GEN_FAILED,
} from '../errors';
import { PubSubService } from '../pubsub/pubsub.service';
import { isValidLength } from 'src/utils';

@@ -26,9 +22,6 @@ import * as O from 'fp-ts/Option';
import { Prisma, TeamCollection as DBTeamCollection } from '@prisma/client';
import { CollectionFolder } from 'src/types/CollectionFolder';
import { stringToJson } from 'src/utils';
import { CollectionSearchNode } from 'src/types/CollectionSearchNode';
import { ParentTreeQueryReturnType, SearchQueryReturnType } from './helper';
import { RESTError } from 'src/types/RESTError';

@Injectable()
export class TeamCollectionService {

@@ -1063,266 +1056,4 @@ export class TeamCollectionService {
      return E.left(TEAM_COLL_NOT_FOUND);
    }
  }

  /**
   * Search for TeamCollections and TeamRequests by title
   *
   * @param searchQuery The search query
   * @param teamID The Team ID
   * @param take Number of items we want returned
   * @param skip Number of items we want to skip
   * @returns An Either of the search results
   */
  async searchByTitle(
    searchQuery: string,
    teamID: string,
    take = 10,
    skip = 0,
  ) {
    // Fetch all collections and requests that match the search query
    const searchResults: SearchQueryReturnType[] = [];

    const matchedCollections = await this.searchCollections(
      searchQuery,
      teamID,
      take,
      skip,
    );
    if (E.isLeft(matchedCollections))
      return E.left(<RESTError>{
        message: matchedCollections.left,
        statusCode: HttpStatus.NOT_FOUND,
      });
    searchResults.push(...matchedCollections.right);

    const matchedRequests = await this.searchRequests(
      searchQuery,
      teamID,
      take,
      skip,
    );
    if (E.isLeft(matchedRequests))
      return E.left(<RESTError>{
        message: matchedRequests.left,
        statusCode: HttpStatus.NOT_FOUND,
      });
    searchResults.push(...matchedRequests.right);

    // Generate the parent tree for searchResults
    const searchResultsWithTree: CollectionSearchNode[] = [];

    for (let i = 0; i < searchResults.length; i++) {
      const fetchedParentTree = await this.fetchParentTree(searchResults[i]);
      if (E.isLeft(fetchedParentTree))
        return E.left(<RESTError>{
          message: fetchedParentTree.left,
          statusCode: HttpStatus.NOT_FOUND,
        });
      searchResultsWithTree.push({
        type: searchResults[i].type,
        title: searchResults[i].title,
        method: searchResults[i].method,
        id: searchResults[i].id,
        path: !fetchedParentTree
          ? []
          : ([fetchedParentTree.right] as CollectionSearchNode[]),
      });
    }

    return E.right({ data: searchResultsWithTree });
  }

  /**
   * Search for TeamCollections by title
   *
   * @param searchQuery The search query
   * @param teamID The Team ID
   * @param take Number of items we want returned
   * @param skip Number of items we want to skip
   * @returns An Either of the search results
   */
  private async searchCollections(
    searchQuery: string,
    teamID: string,
    take: number,
    skip: number,
  ) {
    const query = Prisma.sql`
      select id,title,'collection' AS type
      from "TeamCollection"
      where "TeamCollection"."teamID"=${teamID}
      and titlesearch @@ to_tsquery(${searchQuery})
      order by ts_rank(titlesearch,to_tsquery(${searchQuery}))
      limit ${take}
      OFFSET ${skip === 0 ? 0 : (skip - 1) * take};
    `;
    try {
      const res = await this.prisma.$queryRaw<SearchQueryReturnType[]>(query);
      return E.right(res);
    } catch (error) {
      return E.left(TEAM_COL_SEARCH_FAILED);
    }
  }

  /**
   * Search for TeamRequests by title
   *
   * @param searchQuery The search query
   * @param teamID The Team ID
   * @param take Number of items we want returned
   * @param skip Number of items we want to skip
   * @returns An Either of the search results
   */
  private async searchRequests(
    searchQuery: string,
    teamID: string,
    take: number,
    skip: number,
  ) {
    const query = Prisma.sql`
      select id,title,request->>'method' as method,'request' AS type
      from "TeamRequest"
      where "TeamRequest"."teamID"=${teamID}
      and titlesearch @@ to_tsquery(${searchQuery})
      order by ts_rank(titlesearch,to_tsquery(${searchQuery}))
      limit ${take}
      OFFSET ${skip === 0 ? 0 : (skip - 1) * take};
    `;

    try {
      const res = await this.prisma.$queryRaw<SearchQueryReturnType[]>(query);
      return E.right(res);
    } catch (error) {
      return E.left(TEAM_REQ_SEARCH_FAILED);
    }
  }

  /**
   * Generate the parent tree of a search result
   *
   * @param searchResult The search result for which we want to generate the parent tree
   * @returns The parent tree of the search result
   */
  private async fetchParentTree(searchResult: SearchQueryReturnType) {
    return searchResult.type === 'collection'
      ? await this.fetchCollectionParentTree(searchResult.id)
      : await this.fetchRequestParentTree(searchResult.id);
  }

  /**
   * Generate the parent tree of a collection
   *
   * @param id The ID of the collection
   * @returns The parent tree of the collection
   */
  private async fetchCollectionParentTree(id: string) {
    try {
      const query = Prisma.sql`
        WITH RECURSIVE collection_tree AS (
          SELECT tc.id, tc."parentID", tc.title
          FROM "TeamCollection" AS tc
          JOIN "TeamCollection" AS tr ON tc.id = tr."parentID"
          WHERE tr.id = ${id}

          UNION ALL

          SELECT parent.id, parent."parentID", parent.title
          FROM "TeamCollection" AS parent
          JOIN collection_tree AS ct ON parent.id = ct."parentID"
        )
        SELECT * FROM collection_tree;
      `;
      const res = await this.prisma.$queryRaw<ParentTreeQueryReturnType[]>(
        query,
      );

      const collectionParentTree = this.generateParentTree(res);
      return E.right(collectionParentTree);
    } catch (error) {
      E.left(TEAM_COLL_PARENT_TREE_GEN_FAILED);
    }
  }

  /**
   * Generate the parent tree from the collections
   *
   * @param parentCollections The parent collections
   * @returns The parent tree of the parent collections
   */
  private generateParentTree(parentCollections: ParentTreeQueryReturnType[]) {
    function findChildren(id) {
      const collection = parentCollections.filter((item) => item.id === id)[0];
      if (collection.parentID == null) {
        return {
          id: collection.id,
          title: collection.title,
          type: 'collection',
          path: [],
        };
      }

      const res = {
        id: collection.id,
        title: collection.title,
        type: 'collection',
        path: findChildren(collection.parentID),
      };
      return res;
    }

    if (parentCollections.length > 0) {
      if (parentCollections[0].parentID == null) {
        return {
          id: parentCollections[0].id,
          title: parentCollections[0].title,
          type: 'collection',
          path: [],
        };
      }

      return {
        id: parentCollections[0].id,
        title: parentCollections[0].title,
        type: 'collection',
        path: findChildren(parentCollections[0].parentID),
      };
    }

    return null;
  }

  /**
   * Generate the parent tree of a request
   *
   * @param id The ID of the request
   * @returns The parent tree of the request
   */
  private async fetchRequestParentTree(id: string) {
    try {
      const query = Prisma.sql`
        WITH RECURSIVE request_collection_tree AS (
          SELECT tc.id, tc."parentID", tc.title
          FROM "TeamCollection" AS tc
          JOIN "TeamRequest" AS tr ON tc.id = tr."collectionID"
          WHERE tr.id = ${id}

          UNION ALL

          SELECT parent.id, parent."parentID", parent.title
          FROM "TeamCollection" AS parent
          JOIN request_collection_tree AS ct ON parent.id = ct."parentID"
        )
        SELECT * FROM request_collection_tree;
      `;
      const res = await this.prisma.$queryRaw<ParentTreeQueryReturnType[]>(
        query,
      );

      const requestParentTree = this.generateParentTree(res);
      return E.right(requestParentTree);
    } catch (error) {
      return E.left(TEAM_REQ_PARENT_TREE_GEN_FAILED);
    }
  }
}
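Note: `fetchCollectionParentTree` and `fetchRequestParentTree` walk up the folder hierarchy with a recursive CTE, and `generateParentTree` folds the returned rows into a nested `path` chain. A hedged worked example of that fold — the IDs and titles below are made up:

```ts
// Rows in the order the recursive CTE returns them, closest ancestor first.
const rows = [
  { id: 'c2', parentID: 'c1', title: 'Payments' },
  { id: 'c1', parentID: null, title: 'API v1' },
];

// Feeding `rows` to generateParentTree produces a singly linked chain:
// {
//   id: 'c2', title: 'Payments', type: 'collection',
//   path: { id: 'c1', title: 'API v1', type: 'collection', path: [] },
// }
// i.e. each node's `path` points at its parent until a root (parentID = null).
```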
@@ -1,47 +0,0 @@
import { CanActivate, ExecutionContext, Injectable } from '@nestjs/common';
import { Reflector } from '@nestjs/core';
import { TeamService } from '../../team/team.service';
import { TeamMemberRole } from '../../team/team.model';
import {
  BUG_TEAM_NO_REQUIRE_TEAM_ROLE,
  BUG_AUTH_NO_USER_CTX,
  BUG_TEAM_NO_TEAM_ID,
  TEAM_MEMBER_NOT_FOUND,
  TEAM_NOT_REQUIRED_ROLE,
} from 'src/errors';
import { throwHTTPErr } from 'src/utils';

@Injectable()
export class RESTTeamMemberGuard implements CanActivate {
  constructor(
    private readonly reflector: Reflector,
    private readonly teamService: TeamService,
  ) {}

  async canActivate(context: ExecutionContext): Promise<boolean> {
    const requireRoles = this.reflector.get<TeamMemberRole[]>(
      'requiresTeamRole',
      context.getHandler(),
    );
    if (!requireRoles)
      throwHTTPErr({ message: BUG_TEAM_NO_REQUIRE_TEAM_ROLE, statusCode: 400 });

    const request = context.switchToHttp().getRequest();

    const { user } = request;
    if (user == undefined)
      throwHTTPErr({ message: BUG_AUTH_NO_USER_CTX, statusCode: 400 });

    const teamID = request.params.teamID;
    if (!teamID)
      throwHTTPErr({ message: BUG_TEAM_NO_TEAM_ID, statusCode: 400 });

    const teamMember = await this.teamService.getTeamMember(teamID, user.uid);
    if (!teamMember)
      throwHTTPErr({ message: TEAM_MEMBER_NOT_FOUND, statusCode: 404 });

    if (requireRoles.includes(teamMember.role)) return true;

    throwHTTPErr({ message: TEAM_NOT_REQUIRED_ROLE, statusCode: 403 });
  }
}
@@ -1,10 +1,10 @@
import { HttpStatus } from '@nestjs/common';

/**
 ** Custom interface to handle errors for REST modules such as Auth, Admin modules
 ** Custom interface to handle errors specific to Auth module
 ** Since its REST we need to return the HTTP status code along with the error message
 */
export type RESTError = {
export type AuthError = {
  message: string;
  statusCode: HttpStatus;
};
@@ -1,17 +0,0 @@
|
||||
// Response type of results from the search query
|
||||
export type CollectionSearchNode = {
|
||||
/** Encodes the hierarchy of where the node is **/
|
||||
path: CollectionSearchNode[];
|
||||
} & (
|
||||
| {
|
||||
type: 'request';
|
||||
title: string;
|
||||
method: string;
|
||||
id: string;
|
||||
}
|
||||
| {
|
||||
type: 'collection';
|
||||
title: string;
|
||||
id: string;
|
||||
}
|
||||
);
|
||||
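For reference, a value of the removed CollectionSearchNode type for a request nested one collection deep would look roughly like this (IDs and titles are invented; the ordering of ancestors inside path is not specified by this diff):

const node: CollectionSearchNode = {
  type: 'request',
  title: 'Get user',
  method: 'GET',
  id: 'req_1',
  path: [{ type: 'collection', title: 'Users API', id: 'col_1', path: [] }],
};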
@@ -4,19 +4,12 @@ export enum InfraConfigEnum {

GOOGLE_CLIENT_ID = 'GOOGLE_CLIENT_ID',
GOOGLE_CLIENT_SECRET = 'GOOGLE_CLIENT_SECRET',
GOOGLE_CALLBACK_URL = 'GOOGLE_CALLBACK_URL',
GOOGLE_SCOPE = 'GOOGLE_SCOPE',

GITHUB_CLIENT_ID = 'GITHUB_CLIENT_ID',
GITHUB_CLIENT_SECRET = 'GITHUB_CLIENT_SECRET',
GITHUB_CALLBACK_URL = 'GITHUB_CALLBACK_URL',
GITHUB_SCOPE = 'GITHUB_SCOPE',

MICROSOFT_CLIENT_ID = 'MICROSOFT_CLIENT_ID',
MICROSOFT_CLIENT_SECRET = 'MICROSOFT_CLIENT_SECRET',
MICROSOFT_CALLBACK_URL = 'MICROSOFT_CALLBACK_URL',
MICROSOFT_SCOPE = 'MICROSOFT_SCOPE',
MICROSOFT_TENANT = 'MICROSOFT_TENANT',

VITE_ALLOWED_AUTH_PROVIDERS = 'VITE_ALLOWED_AUTH_PROVIDERS',

@@ -24,3 +17,20 @@ export enum InfraConfigEnum {
ANALYTICS_USER_ID = 'ANALYTICS_USER_ID',
IS_FIRST_TIME_INFRA_SETUP = 'IS_FIRST_TIME_INFRA_SETUP',
}

export enum InfraConfigEnumForClient {
MAILER_SMTP_URL = 'MAILER_SMTP_URL',
MAILER_ADDRESS_FROM = 'MAILER_ADDRESS_FROM',

GOOGLE_CLIENT_ID = 'GOOGLE_CLIENT_ID',
GOOGLE_CLIENT_SECRET = 'GOOGLE_CLIENT_SECRET',

GITHUB_CLIENT_ID = 'GITHUB_CLIENT_ID',
GITHUB_CLIENT_SECRET = 'GITHUB_CLIENT_SECRET',

MICROSOFT_CLIENT_ID = 'MICROSOFT_CLIENT_ID',
MICROSOFT_CLIENT_SECRET = 'MICROSOFT_CLIENT_SECRET',

ALLOW_ANALYTICS_COLLECTION = 'ALLOW_ANALYTICS_COLLECTION',
IS_FIRST_TIME_INFRA_SETUP = 'IS_FIRST_TIME_INFRA_SETUP',
}
@@ -1,4 +1,4 @@
import { ExecutionContext, HttpException } from '@nestjs/common';
import { ExecutionContext } from '@nestjs/common';
import { Reflector } from '@nestjs/core';
import { GqlExecutionContext } from '@nestjs/graphql';
import { pipe } from 'fp-ts/lib/function';

@@ -16,7 +16,6 @@ import {
JSON_INVALID,
} from './errors';
import { AuthProvider } from './auth/helper';
import { RESTError } from './types/RESTError';

/**
* A workaround to throw an exception in an expression.

@@ -28,15 +27,6 @@ export function throwErr(errMessage: string): never {
throw new Error(errMessage);
}

/**
* This function allows throw to be used as an expression
* @param errMessage Message present in the error message
*/
export function throwHTTPErr(errorData: RESTError): never {
const { message, statusCode } = errorData;
throw new HttpException(message, statusCode);
}

/**
* Prints the given value to log and returns the same value.
* Used for debugging functional pipelines.

@@ -183,16 +173,6 @@ export const validateSMTPUrl = (url: string) => {
return false;
};

/**
* Checks to see if the URL is valid or not
* @param url The URL to validate
* @returns boolean
*/
export const validateUrl = (url: string) => {
const urlRegex = /^(http|https):\/\/[^ "]+$/;
return urlRegex.test(url);
};

/**
* String to JSON parser
* @param {str} str The string to parse
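A quick illustration of what the validateUrl regex above accepts and rejects:

validateUrl('https://example.com/path'); // true
validateUrl('ftp://example.com');        // false, only http/https match
validateUrl('http://bad url');           // false, spaces are rejected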
@@ -315,8 +315,7 @@
"proxy_error": "Proxy error",
"script_fail": "Could not execute pre-request script",
"something_went_wrong": "Something went wrong",
"test_script_fail": "Could not execute post-request script",
"reading_files": "Error while reading one or more files."
"test_script_fail": "Could not execute post-request script"
},
"export": {
"as_json": "Export as JSON",

@@ -414,10 +413,7 @@
"json_description": "Import collections from a Hoppscotch Collections JSON file",
"postman_environment": "Postman Environment",
"postman_environment_description": "Import Postman Environment from a JSON file",
"title": "Import",
"file_size_limit_exceeded_warning_multiple_files": "Chosen files exceed the recommended limit of 10MB. Only the first {files} selected will be imported",
"file_size_limit_exceeded_warning_single_file": "The currently chosen file exceeds the recommended limit of 10MB. Please select another file.",
"success": "Successfully imported"
"title": "Import"
},
"inspections": {
"description": "Inspect possible errors",
@@ -263,7 +263,7 @@ const HoppOpenAPIImporter: ImporterOrExporter = {
step: UrlSource({
caption: "import.from_url",
onImportFromURL: async (content) => {
const res = await hoppOpenAPIImporter([content])()
const res = await hoppOpenAPIImporter(content)()

if (E.isRight(res)) {
handleImportToStore(res.right)

@@ -694,7 +694,7 @@ class MyCollectionsAdapter implements SmartTreeAdapter<MyCollectionNode> {
let target = collections[indexPaths.shift() as number]

while (indexPaths.length > 0)
target = target?.folders[indexPaths.shift() as number]
target = target.folders[indexPaths.shift() as number]

return target !== undefined ? target : null
}

@@ -133,7 +133,7 @@ const PostmanEnvironmentsImport: ImporterOrExporter = {
return
}

handleImportToStore(res.right)
handleImportToStore([res.right])

platform.analytics?.logEvent({
type: "HOPP_IMPORT_ENVIRONMENT",

@@ -166,14 +166,19 @@ const insomniaEnvironmentsImport: ImporterOrExporter = {
return
}

const globalEnvs = res.right.filter(
const globalEnvIndex = res.right.findIndex(
(env) => env.name === "Base Environment"
)
const otherEnvs = res.right.filter(
(env) => env.name !== "Base Environment"
)

handleImportToStore(otherEnvs, globalEnvs)
const globalEnv =
globalEnvIndex !== -1 ? res.right[globalEnvIndex] : undefined

// remove the global env from the environments array to prevent it from being imported twice
if (globalEnvIndex !== -1) {
res.right.splice(globalEnvIndex, 1)
}

handleImportToStore(res.right, globalEnv)

platform.analytics?.logEvent({
type: "HOPP_IMPORT_ENVIRONMENT",

@@ -335,14 +340,14 @@ const showImportFailedError = () => {

const handleImportToStore = async (
environments: Environment[],
globalEnvs: NonSecretEnvironment[] = []
globalEnv?: NonSecretEnvironment
) => {
// Add global envs to the store
globalEnvs.forEach(({ variables }) => {
variables.forEach(({ key, value, secret }) => {
// if there's a global env, add them to the store
if (globalEnv) {
globalEnv.variables.forEach(({ key, value, secret }) =>
addGlobalEnvVariable({ key, value, secret })
})
})
)
}

if (props.environmentType === "MY_ENV") {
appendEnvironments(environments)
@@ -13,7 +13,6 @@
{{ t(`${caption}`) }}
</span>
</p>

<div
class="flex flex-col ml-10 border border-dashed rounded border-dividerDark"
>

@@ -24,30 +23,15 @@
type="file"
class="p-4 cursor-pointer transition file:transition file:cursor-pointer text-secondary hover:text-secondaryDark file:mr-2 file:py-2 file:px-4 file:rounded file:border-0 file:text-secondary hover:file:text-secondaryDark file:bg-primaryLight hover:file:bg-primaryDark"
:accept="acceptedFileTypes"
multiple
@change="onFileChange"
/>
</div>

<p v-if="showFileSizeLimitExceededWarning" class="text-red-500 ml-10">
<template v-if="importFilesCount">
{{
t("import.file_size_limit_exceeded_warning_multiple_files", {
files:
importFilesCount === 1 ? "file" : `${importFilesCount} files`,
})
}}
</template>

<template v-else>
{{ t("import.file_size_limit_exceeded_warning_single_file") }}
</template>
</p>
<div>
<HoppButtonPrimary
class="w-full"
:label="t('import.title')"
:disabled="!hasFile || showFileSizeLimitExceededWarning"
:disabled="!hasFile"
@click="emit('importFromFile', fileContent)"
/>
</div>

@@ -67,30 +51,16 @@ defineProps<{
const t = useI18n()
const toast = useToast()

const ALLOWED_FILE_SIZE_LIMIT = 10 // 10 MB

const importFilesCount = ref(0)

const hasFile = ref(false)
const showFileSizeLimitExceededWarning = ref(false)
const fileContent = ref<string[]>([])
const fileContent = ref("")

const inputChooseFileToImportFrom = ref<HTMLInputElement | any>()

const emit = defineEmits<{
(e: "importFromFile", content: string[]): void
(e: "importFromFile", content: string): void
}>()

const onFileChange = async () => {
// Reset the state on entering the handler to avoid any stale state
if (showFileSizeLimitExceededWarning.value) {
showFileSizeLimitExceededWarning.value = false
}

if (importFilesCount.value) {
importFilesCount.value = 0
}

const onFileChange = () => {
const inputFileToImport = inputChooseFileToImportFrom.value

if (!inputFileToImport) {

@@ -99,52 +69,27 @@ const onFileChange = async () => {
}

if (!inputFileToImport.files || inputFileToImport.files.length === 0) {
inputChooseFileToImportFrom.value = ""
inputChooseFileToImportFrom.value[0].value = ""
hasFile.value = false
toast.show(t("action.choose_file").toString())
return
}

const readerPromises: Promise<string | null>[] = []
const reader = new FileReader()

let totalFileSize = 0

for (let i = 0; i < inputFileToImport.files.length; i++) {
const file = inputFileToImport.files[i]

totalFileSize += file.size / 1024 / 1024

if (totalFileSize > ALLOWED_FILE_SIZE_LIMIT) {
showFileSizeLimitExceededWarning.value = true
break
reader.onload = ({ target }) => {
const content = target!.result as string | null
if (!content) {
hasFile.value = false
toast.show(t("action.choose_file").toString())
return
}

const reader = new FileReader()
fileContent.value = content

readerPromises.push(
new Promise((resolve, reject) => {
reader.onload = () => resolve(reader.result as string | null)
reader.onerror = reject
reader.readAsText(file)
})
)
hasFile.value = !!content?.length
}

importFilesCount.value = readerPromises.length

const results = await Promise.allSettled(readerPromises)

const contentsArr = results
.filter((result) => result.status === "fulfilled")
.map((result) => (result as { value: string | null }).value)
.filter(Boolean) as string[]

const errors = results.filter((result) => result.status === "rejected")
if (errors.length) {
toast.error(t("error.reading_files"))
}

fileContent.value = contentsArr
hasFile.value = contentsArr.length > 0
reader.readAsText(inputFileToImport.files[0])
}
</script>
@@ -2,7 +2,6 @@ import { pipe, flow } from "fp-ts/function"
import * as TE from "fp-ts/TaskEither"
import * as O from "fp-ts/Option"
import * as RA from "fp-ts/ReadonlyArray"
import * as A from "fp-ts/Array"
import { translateToNewRESTCollection, HoppCollection } from "@hoppscotch/data"
import { isPlainObject as _isPlainObject } from "lodash-es"

@@ -10,13 +9,11 @@ import { IMPORTER_INVALID_FILE_FORMAT } from "."
import { safeParseJSON } from "~/helpers/functional/json"
import { translateToNewGQLCollection } from "@hoppscotch/data"

export const hoppRESTImporter = (content: string[]) =>
export const hoppRESTImporter = (content: string) =>
pipe(
content,
A.traverse(O.Applicative)((str) => safeParseJSON(str, true)),
safeParseJSON(content),
O.chain(
flow(
A.flatten,
makeCollectionsArray,
RA.map(validateCollection),
O.sequenceArray,

@@ -8,35 +8,17 @@ import { IMPORTER_INVALID_FILE_FORMAT } from "."
import { Environment } from "@hoppscotch/data"
import { z } from "zod"

export const hoppEnvImporter = (contents: string[]) => {
const parsedContents = contents.map((str) => safeParseJSON(str, true))
export const hoppEnvImporter = (content: string) => {
const parsedContent = safeParseJSON(content, true)

if (parsedContents.some((parsed) => O.isNone(parsed))) {
// parse json from the environments string
if (O.isNone(parsedContent)) {
return TE.left(IMPORTER_INVALID_FILE_FORMAT)
}

const parsedValues = parsedContents.flatMap((content) => {
const unwrappedContent = O.toNullable(content) as Environment[] | null

if (unwrappedContent) {
return unwrappedContent.map((contentEntry) => {
return {
...contentEntry,
variables: contentEntry.variables?.map((valueEntry) => ({
...valueEntry,
...("value" in valueEntry
? { value: String(valueEntry.value) }
: {}),
})),
}
})
}
return null
})

const validationResult = z
.array(entityReference(Environment))
.safeParse(parsedValues)
.safeParse(parsedContent.value)

if (!validationResult.success) {
return TE.left(IMPORTER_INVALID_FILE_FORMAT)

@@ -3,10 +3,10 @@ import * as E from "fp-ts/Either"

// TODO: add zod validation
export const hoppGqlCollectionsImporter = (
contents: string[]
content: string
): E.Either<"INVALID_JSON", HoppCollection[]> => {
return E.tryCatch(
() => contents.flatMap((content) => JSON.parse(content)),
() => JSON.parse(content) as HoppCollection[],
() => "INVALID_JSON"
)
}
@@ -6,7 +6,7 @@ import { v4 as uuidv4 } from "uuid"
export function FileSource(metadata: {
acceptedFileTypes: string
caption: string
onImportFromFile: (content: string[]) => any | Promise<any>
onImportFromFile: (content: string) => any | Promise<any>
}) {
const stepID = uuidv4()

@@ -10,14 +10,14 @@ import { v4 as uuidv4 } from "uuid"
export function GistSource(metadata: {
caption: string
onImportFromGist: (
importResult: E.Either<string, string[]>
importResult: E.Either<string, string>
) => any | Promise<any>
}) {
const stepID = uuidv4()

return defineStep(stepID, UrlImport, () => ({
caption: metadata.caption,
onImportFromURL: (gistResponse: Record<string, unknown>) => {
onImportFromURL: (gistResponse) => {
const fileSchema = z.object({
files: z.record(z.object({ content: z.string() })),
})

@@ -29,11 +29,9 @@ export function GistSource(metadata: {
return
}

const contents = Object.values(parseResult.data.files).map(
({ content }) => content
)
const content = Object.values(parseResult.data.files)[0].content

metadata.onImportFromGist(E.right(contents))
metadata.onImportFromGist(E.right(content))
},
fetchLogic: fetchGistFromUrl,
}))
@@ -1,21 +1,19 @@
import { convert, ImportRequest } from "insomnia-importers"
import { pipe } from "fp-ts/function"
import {
HoppCollection,
HoppRESTAuth,
HoppRESTHeader,
HoppRESTParam,
HoppRESTReqBody,
HoppRESTRequest,
knownContentTypes,
makeCollection,
makeRESTRequest,
HoppCollection,
makeCollection,
} from "@hoppscotch/data"

import * as A from "fp-ts/Array"
import * as TE from "fp-ts/TaskEither"
import * as TO from "fp-ts/TaskOption"
import { pipe } from "fp-ts/function"
import { ImportRequest, convert } from "insomnia-importers"

import * as TE from "fp-ts/TaskEither"
import { IMPORTER_INVALID_FILE_FORMAT } from "."
import { replaceInsomniaTemplating } from "./insomniaEnv"

@@ -205,18 +203,15 @@ const getHoppFolder = (
headers: [],
})

const getHoppCollections = (docs: InsomniaDoc[]) => {
return docs.flatMap((doc) => {
return getFoldersIn(null, doc.data.resources).map((f) =>
getHoppFolder(f, doc.data.resources)
)
})
}
const getHoppCollections = (doc: InsomniaDoc) =>
getFoldersIn(null, doc.data.resources).map((f) =>
getHoppFolder(f, doc.data.resources)
)

export const hoppInsomniaImporter = (fileContents: string[]) =>
export const hoppInsomniaImporter = (fileContent: string) =>
pipe(
fileContents,
A.traverse(TO.ApplicativeSeq)(parseInsomniaDoc),
fileContent,
parseInsomniaDoc,
TO.map(getHoppCollections),
TE.fromTaskOption(() => IMPORTER_INVALID_FILE_FORMAT)
)
@@ -29,36 +29,33 @@ export const replaceInsomniaTemplating = (expression: string) => {
return expression.replaceAll(regex, "<<$1>>")
}

export const insomniaEnvImporter = (contents: string[]) => {
const parsedContents = contents.map((str) => safeParseJSONOrYAML(str))
if (parsedContents.some((parsed) => O.isNone(parsed))) {
export const insomniaEnvImporter = (content: string) => {
const parsedContent = safeParseJSONOrYAML(content)

if (O.isNone(parsedContent)) {
return TE.left(IMPORTER_INVALID_FILE_FORMAT)
}

const parsedValues = parsedContents.map((parsed) => O.toNullable(parsed))

const validationResult = z
.array(insomniaResourcesSchema)
.safeParse(parsedValues)
const validationResult = insomniaResourcesSchema.safeParse(
parsedContent.value
)

if (!validationResult.success) {
return TE.left(IMPORTER_INVALID_FILE_FORMAT)
}

const insomniaEnvs = validationResult.data.flatMap(({ resources }) => {
return resources
.filter((resource) => resource._type === "environment")
.map((envResource) => {
const envResourceData = envResource.data as Record<string, unknown>
const stringifiedData: Record<string, string> = {}
const insomniaEnvs = validationResult.data.resources
.filter((resource) => resource._type === "environment")
.map((envResource) => {
const envResourceData = envResource.data as Record<string, unknown>
const stringifiedData: Record<string, string> = {}

Object.keys(envResourceData).forEach((key) => {
stringifiedData[key] = String(envResourceData[key])
})

return { ...envResource, data: stringifiedData }
Object.keys(envResourceData).forEach((key) => {
stringifiedData[key] = String(envResourceData[key])
})
})

return { ...envResource, data: stringifiedData }
})

const environments: NonSecretEnvironment[] = []
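As a reference point, the reworked insomniaEnvImporter above now validates a single parsed export and keeps only resources whose _type is "environment", stringifying each data value. A minimal input in roughly that shape (field values invented; the full insomniaResourcesSchema is not shown in this diff) would be:

const sampleInsomniaExport = {
  resources: [
    { _type: "environment", name: "Base Environment", data: { baseUrl: "https://api.example.com", retries: 3 } },
    { _type: "request", name: "ignored by the environment filter" },
  ],
}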
@@ -584,28 +584,24 @@ const convertPathToHoppReqs = (
RA.toArray
)

const convertOpenApiDocsToHopp = (
docs: OpenAPI.Document[]
const convertOpenApiDocToHopp = (
doc: OpenAPI.Document
): TE.TaskEither<never, HoppCollection[]> => {
const collections = docs.map((doc) => {
const name = doc.info.title
const name = doc.info.title

const paths = Object.entries(doc.paths ?? {})
.map(([pathName, pathObj]) =>
convertPathToHoppReqs(doc, pathName, pathObj)
)
.flat()
const paths = Object.entries(doc.paths ?? {})
.map(([pathName, pathObj]) => convertPathToHoppReqs(doc, pathName, pathObj))
.flat()

return makeCollection({
return TE.of([
makeCollection({
name,
folders: [],
requests: paths,
auth: { authType: "inherit", authActive: true },
headers: [],
})
})

return TE.of(collections)
}),
])
}

const parseOpenAPIDocContent = (str: string) =>

@@ -618,49 +614,29 @@ const parseOpenAPIDocContent = (str: string) =>
)
)

export const hoppOpenAPIImporter = (fileContents: string[]) =>
export const hoppOpenAPIImporter = (fileContent: string) =>
pipe(
// See if we can parse JSON properly
fileContents,
A.traverse(O.Applicative)(parseOpenAPIDocContent),
TE.fromOption(() => {
return IMPORTER_INVALID_FILE_FORMAT
}),
fileContent,
parseOpenAPIDocContent,
TE.fromOption(() => IMPORTER_INVALID_FILE_FORMAT),
// Try validating, else the importer is invalid file format
TE.chainW((docArr) => {
return pipe(
TE.chainW((obj) =>
pipe(
TE.tryCatch(
async () => {
const resultDoc = []

for (const docObj of docArr) {
const validatedDoc = await SwaggerParser.validate(docObj)
resultDoc.push(validatedDoc)
}

return resultDoc
},
() => SwaggerParser.validate(obj),
() => IMPORTER_INVALID_FILE_FORMAT
)
)
}),
),
// Deference the references
TE.chainW((docArr) =>
TE.chainW((obj) =>
pipe(
TE.tryCatch(
async () => {
const resultDoc = []

for (const docObj of docArr) {
const validatedDoc = await SwaggerParser.dereference(docObj)
resultDoc.push(validatedDoc)
}

return resultDoc
},
() => SwaggerParser.dereference(obj),
() => OPENAPI_DEREF_ERROR
)
)
),
TE.chainW(convertOpenApiDocsToHopp)
TE.chainW(convertOpenApiDocToHopp)
)
@@ -55,11 +55,7 @@ const readPMCollection = (def: string) =>
pipe(
def,
safeParseJSON,
O.chain((data) =>
O.tryCatch(() => {
return new PMCollection(data)
})
)
O.chain((data) => O.tryCatch(() => new PMCollection(data)))
)

const getHoppReqHeaders = (item: Item): HoppRESTHeader[] =>

@@ -300,17 +296,15 @@ const getHoppFolder = (ig: ItemGroup<Item>): HoppCollection =>
headers: [],
})

export const getHoppCollections = (collections: PMCollection[]) => {
return collections.map(getHoppFolder)
}
export const getHoppCollection = (coll: PMCollection) => getHoppFolder(coll)

export const hoppPostmanImporter = (fileContents: string[]) =>
export const hoppPostmanImporter = (fileContent: string) =>
pipe(
// Try reading
fileContents,
A.traverse(O.Applicative)(readPMCollection),
fileContent,
readPMCollection,

O.map(flow(getHoppCollections)),
O.map(flow(getHoppCollection, A.of)),

TE.fromOption(() => IMPORTER_INVALID_FILE_FORMAT)
)
@@ -1,11 +1,12 @@
import { Environment } from "@hoppscotch/data"
import * as O from "fp-ts/Option"
import * as TE from "fp-ts/TaskEither"
import { uniqueId } from "lodash-es"
import { z } from "zod"
import * as O from "fp-ts/Option"

import { safeParseJSON } from "~/helpers/functional/json"
import { IMPORTER_INVALID_FILE_FORMAT } from "."
import { safeParseJSON } from "~/helpers/functional/json"

import { z } from "zod"
import { Environment } from "@hoppscotch/data"
import { uniqueId } from "lodash-es"

const postmanEnvSchema = z.object({
name: z.string(),

@@ -17,44 +18,32 @@ const postmanEnvSchema = z.object({
),
})

type PostmanEnv = z.infer<typeof postmanEnvSchema>
export const postmanEnvImporter = (content: string) => {
const parsedContent = safeParseJSON(content)

export const postmanEnvImporter = (contents: string[]) => {
const parsedContents = contents.map((str) => safeParseJSON(str, true))
if (parsedContents.some((parsed) => O.isNone(parsed))) {
// parse json from the environments string
if (O.isNone(parsedContent)) {
return TE.left(IMPORTER_INVALID_FILE_FORMAT)
}

const parsedValues = parsedContents.flatMap((parsed) => {
const unwrappedEntry = O.toNullable(parsed) as PostmanEnv[] | null

if (unwrappedEntry) {
return unwrappedEntry.map((entry) => ({
...entry,
values: entry.values?.map((valueEntry) => ({
...valueEntry,
value: String(valueEntry.value),
})),
}))
}
return null
})

const validationResult = z.array(postmanEnvSchema).safeParse(parsedValues)
const validationResult = postmanEnvSchema.safeParse(parsedContent.value)

if (!validationResult.success) {
return TE.left(IMPORTER_INVALID_FILE_FORMAT)
}

// Convert `values` to `variables` to match the format expected by the system
const environments: Environment[] = validationResult.data.map(
({ name, values }) => ({
id: uniqueId(),
v: 1,
name,
variables: values.map((entires) => ({ ...entires, secret: false })),
})
const postmanEnv = validationResult.data

const environment: Environment = {
id: uniqueId(),
v: 1,
name: postmanEnv.name,
variables: [],
}

postmanEnv.values.forEach(({ key, value }) =>
environment.variables.push({ key, value, secret: false })
)

return TE.right(environments)
return TE.right(environment)
}
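The reworked postmanEnvImporter above validates one environment object against postmanEnvSchema (a name plus a values array) and converts values into variables. An input in the accepted shape looks roughly like this (contents invented for illustration):

const samplePostmanEnvironment = {
  name: "Staging",
  values: [
    { key: "baseUrl", value: "https://staging.example.com" },
    { key: "apiKey", value: "dummy-key" },
  ],
}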
@@ -61,7 +61,7 @@ export function navigateToFolderWithIndexPath(

let target = collections[indexPaths.shift() as number]

while (indexPaths.length > 0 && target)
while (indexPaths.length > 0)
target = target.folders[indexPaths.shift() as number]

return target !== undefined ? target : null
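With the added guard in the while loop, a dangling index path now resolves to null instead of throwing; for example (data invented for illustration):

const collections = [{ folders: [{ folders: [] }] }]
navigateToFolderWithIndexPath(collections, [0, 0])    // the inner folder
navigateToFolderWithIndexPath(collections, [0, 5, 0]) // null rather than a TypeError on undefined.folders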
@@ -95,15 +95,31 @@
</template>

<script lang="ts" setup>
import { ref, onMounted } from "vue"
import { ref, onMounted, onBeforeUnmount } from "vue"
import { safelyExtractRESTRequest } from "@hoppscotch/data"
import { translateExtURLParams } from "~/helpers/RESTExtURLParams"
import { useRoute } from "vue-router"
import { useI18n } from "@composables/i18n"
import { getDefaultRESTRequest } from "~/helpers/rest/default"
import { defineActionHandler, invokeAction } from "~/helpers/actions"
import { onLoggedIn } from "~/composables/auth"
import { platform } from "~/platform"
import {
audit,
BehaviorSubject,
combineLatest,
EMPTY,
from,
map,
Subscription,
} from "rxjs"
import { useToast } from "~/composables/toast"
import { watchDebounced } from "@vueuse/core"
import { useReadonlyStream } from "~/composables/stream"
import {
changeCurrentSyncStatus,
currentSyncingStatus$,
} from "~/newstore/syncing"
import { useService } from "dioc/vue"
import { InspectionService } from "~/services/inspection"
import { HeaderInspectorService } from "~/services/inspection/inspectors/header.inspector"

@@ -111,7 +127,7 @@ import { EnvironmentInspectorService } from "~/services/inspection/inspectors/en
import { ResponseInspectorService } from "~/services/inspection/inspectors/response.inspector"
import { cloneDeep } from "lodash-es"
import { RESTTabService } from "~/services/tab/rest"
import { HoppTab } from "~/services/tab"
import { HoppTab, PersistableTabState } from "~/services/tab"
import { HoppRESTDocument } from "~/helpers/rest/document"

const savingRequest = ref(false)

@@ -124,6 +140,7 @@ const exceptedTabID = ref<string | null>(null)
const renameTabID = ref<string | null>(null)

const t = useI18n()
const toast = useToast()

const tabs = useService(RESTTabService)

@@ -154,6 +171,12 @@ const contextMenu = ref<PopupDetails>({

const activeTabs = tabs.getActiveTabs()

const confirmSync = useReadonlyStream(currentSyncingStatus$, {
isInitialSync: false,
shouldSync: true,
})
const tabStateForSync = ref<PersistableTabState<HoppRESTDocument> | null>(null)

function bindRequestToURLParams() {
const route = useRoute()
// Get URL parameters and set that as the request

@@ -304,6 +327,111 @@ const shareTabRequest = (tabID: string) => {
}
}

const syncTabState = () => {
if (tabStateForSync.value)
tabs.loadTabsFromPersistedState(tabStateForSync.value)
}

/**
* Performs sync of the REST Tab session with Firestore.
*
* @returns A subscription to the sync observable stream.
* Unsubscribe to stop syncing.
*/
function startTabStateSync(): Subscription {
const currentUser$ = platform.auth.getCurrentUserStream()
const tabState$ =
new BehaviorSubject<PersistableTabState<HoppRESTDocument> | null>(null)

watchDebounced(
tabs.persistableTabState,
(state) => {
tabState$.next(state)
},
{ debounce: 500, deep: true }
)

const sub = combineLatest([currentUser$, tabState$])
.pipe(
map(([user, tabState]) =>
user && tabState
? from(platform.sync.tabState.writeCurrentTabState(user, tabState))
: EMPTY
),
audit((x) => x)
)
.subscribe(() => {
// NOTE: This subscription should be kept
})

return sub
}

const showSyncToast = () => {
toast.show(t("confirm.sync"), {
duration: 0,
action: [
{
text: `${t("action.yes")}`,
onClick: (_, toastObject) => {
syncTabState()
changeCurrentSyncStatus({
isInitialSync: true,
shouldSync: true,
})
toastObject.goAway(0)
},
},
{
text: `${t("action.no")}`,
onClick: (_, toastObject) => {
changeCurrentSyncStatus({
isInitialSync: true,
shouldSync: false,
})
toastObject.goAway(0)
},
},
],
})
}

function setupTabStateSync() {
const route = useRoute()

// Subscription to request sync
let sub: Subscription | null = null

// Load request on login resolve and start sync
onLoggedIn(async () => {
if (
Object.keys(route.query).length === 0 &&
!(route.query.code || route.query.error)
) {
const tabStateFromSync =
await platform.sync.tabState.loadTabStateFromSync()

if (tabStateFromSync && !confirmSync.value.isInitialSync) {
tabStateForSync.value = tabStateFromSync
showSyncToast()
// Have to set isInitialSync to true here because the toast is shown
// and the user does not click on any of the actions
changeCurrentSyncStatus({
isInitialSync: true,
shouldSync: false,
})
}
}

sub = startTabStateSync()
})

// Stop subscription to stop syncing
onBeforeUnmount(() => {
sub?.unsubscribe()
})
}

defineActionHandler("contextmenu.open", ({ position, text }) => {
if (text) {
contextMenu.value = {

@@ -320,6 +448,7 @@ defineActionHandler("contextmenu.open", ({ position, text }) => {
}
})

setupTabStateSync()
bindRequestToURLParams()

defineActionHandler("rest.request.open", ({ doc }) => {
@@ -4,6 +4,7 @@ import { EnvironmentsPlatformDef } from "./environments"
import { CollectionsPlatformDef } from "./collections"
import { SettingsPlatformDef } from "./settings"
import { HistoryPlatformDef } from "./history"
import { TabStatePlatformDef } from "./tab"
import { AnalyticsPlatformDef } from "./analytics"
import { InterceptorsPlatformDef } from "./interceptors"
import { HoppModule } from "~/modules"

@@ -24,6 +25,7 @@ export type PlatformDef = {
collections: CollectionsPlatformDef
settings: SettingsPlatformDef
history: HistoryPlatformDef
tabState: TabStatePlatformDef
}
interceptors: InterceptorsPlatformDef
additionalInspectors?: InspectorsPlatformDef
packages/hoppscotch-common/src/platform/tab.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
import { PersistableTabState } from "~/services/tab"
import { HoppUser } from "./auth"
import { HoppRESTDocument } from "~/helpers/rest/document"

export type TabStatePlatformDef = {
loadTabStateFromSync: () => Promise<PersistableTabState<HoppRESTDocument> | null>
writeCurrentTabState: (
user: HoppUser,
persistableTabState: PersistableTabState<HoppRESTDocument>
) => Promise<void>
}
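The new TabStatePlatformDef contract above only asks for a loader and a writer; a minimal localStorage-backed stub that satisfies it (illustrative only, not part of this PR) could be:

import { PersistableTabState } from "~/services/tab"
import { HoppRESTDocument } from "~/helpers/rest/document"
import { HoppUser } from "./auth"
import { TabStatePlatformDef } from "./tab"

export const localStorageTabStateDef: TabStatePlatformDef = {
  // Reads whatever was last persisted locally; returns null when nothing is stored.
  loadTabStateFromSync: async () => {
    const raw = localStorage.getItem("restTabState")
    return raw ? (JSON.parse(raw) as PersistableTabState<HoppRESTDocument>) : null
  },
  // Ignores the user and persists the serialized tab state locally.
  writeCurrentTabState: async (_user: HoppUser, state: PersistableTabState<HoppRESTDocument>) => {
    localStorage.setItem("restTabState", JSON.stringify(state))
  },
}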
@@ -0,0 +1,11 @@
mutation UpdateUserSession(
$currentSession: String!
$sessionType: SessionType!
) {
updateUserSessions(
currentSession: $currentSession
sessionType: $sessionType
) {
currentRESTSession
}
}

@@ -0,0 +1,5 @@
query GetCurrentRESTSession {
me {
currentRESTSession
}
}
@@ -4,6 +4,7 @@ import { def as environmentsDef } from "./platform/environments/environments.pla
import { def as collectionsDef } from "./platform/collections/collections.platform"
import { def as settingsDef } from "./platform/settings/settings.platform"
import { def as historyDef } from "./platform/history/history.platform"
import { def as tabStateDef } from "./platform/tabState/tabState.platform"
import { proxyInterceptor } from "@hoppscotch/common/platform/std/interceptors/proxy"
import { ExtensionInspectorService } from "@hoppscotch/common/platform/std/inspections/extension.inspector"
import { NativeInterceptorService } from "./platform/interceptors/native"

@@ -45,6 +46,7 @@ const headerPaddingTop = ref("0px")
collections: collectionsDef,
settings: settingsDef,
history: historyDef,
tabState: tabStateDef,
},
interceptors: {
default: "native",

@@ -95,18 +97,17 @@ const headerPaddingTop = ref("0px")
}
})()

function isTextInput(target: EventTarget | null) {
if (target instanceof HTMLInputElement) {
return (
target.type === "text" ||
target.type === "email" ||
target.type === "password" ||
target.type === "number" ||
target.type === "search" ||
target.type === "tel" ||
target.type === "url" ||
target.type === "textarea"
)
return target.type === 'text'
|| target.type === 'email'
|| target.type === 'password'
|| target.type === 'number'
|| target.type === 'search'
|| target.type === 'tel'
|| target.type === 'url'
|| target.type === 'textarea'
} else if (target instanceof HTMLTextAreaElement) {
return true
} else if (target instanceof HTMLElement && target.isContentEditable) {

@@ -116,12 +117,8 @@ function isTextInput(target: EventTarget | null) {
return false
}

window.addEventListener(
"keydown",
function (e) {
if (e.key === "Backspace" && !isTextInput(e.target)) {
e.preventDefault()
}
},
true
)
window.addEventListener('keydown',function(e){
if (e.key === "Backspace" && !isTextInput(e.target)) {
e.preventDefault()
}
},true);
@@ -0,0 +1,36 @@
import {
runMutation,
runGQLQuery,
} from "@hoppscotch/common/helpers/backend/GQLClient"
import {
GetCurrentRestSessionDocument,
GetCurrentRestSessionQuery,
GetCurrentRestSessionQueryVariables,
SessionType,
UpdateUserSessionDocument,
UpdateUserSessionMutation,
UpdateUserSessionMutationVariables,
} from "../../api/generated/graphql"

export const updateUserSession = (
currentSession: string,
sessionType: SessionType
) =>
runMutation<
UpdateUserSessionMutation,
UpdateUserSessionMutationVariables,
""
>(UpdateUserSessionDocument, {
sessionType,
currentSession,
})()

export const getCurrentRestSession = () =>
runGQLQuery<
GetCurrentRestSessionQuery,
GetCurrentRestSessionQueryVariables,
""
>({
query: GetCurrentRestSessionDocument,
variables: {},
})

@@ -0,0 +1,37 @@
import { PersistableRESTTabState } from "@hoppscotch/common/helpers/rest/tab"
import { HoppUser } from "@hoppscotch/common/platform/auth"
import { TabStatePlatformDef } from "@hoppscotch/common/platform/tab"
import { def as platformAuth } from "@platform/auth"
import { getCurrentRestSession, updateUserSession } from "./tabState.api"
import { SessionType } from "../../api/generated/graphql"
import * as E from "fp-ts/Either"

async function writeCurrentTabState(
_: HoppUser,
persistableTabState: PersistableRESTTabState
) {
await updateUserSession(JSON.stringify(persistableTabState), SessionType.Rest)
}

async function loadTabStateFromSync(): Promise<PersistableRESTTabState | null> {
const currentUser = platformAuth.getCurrentUser()

if (!currentUser)
throw new Error("Cannot load request from sync without login")

const res = await getCurrentRestSession()

if (E.isRight(res)) {
const currentRESTSession = res.right.me.currentRESTSession

return currentRESTSession ? JSON.parse(currentRESTSession) : null
} else {
}

return null
}

export const def: TabStatePlatformDef = {
loadTabStateFromSync,
writeCurrentTabState,
}
@@ -0,0 +1,11 @@
mutation UpdateUserSession(
$currentSession: String!
$sessionType: SessionType!
) {
updateUserSessions(
currentSession: $currentSession
sessionType: $sessionType
) {
currentRESTSession
}
}

@@ -0,0 +1,5 @@
query GetCurrentRESTSession {
me {
currentRESTSession
}
}

@@ -4,6 +4,7 @@ import { def as environmentsDef } from "./platform/environments/environments.pla
import { def as collectionsDef } from "./platform/collections/collections.platform"
import { def as settingsDef } from "./platform/settings/settings.platform"
import { def as historyDef } from "./platform/history/history.platform"
import { def as tabStateDef } from "./platform/tabState/tabState.platform"
import { browserInterceptor } from "@hoppscotch/common/platform/std/interceptors/browser"
import { proxyInterceptor } from "@hoppscotch/common/platform/std/interceptors/proxy"
import { ExtensionInspectorService } from "@hoppscotch/common/platform/std/inspections/extension.inspector"

@@ -24,6 +25,7 @@ createHoppApp("#app", {
collections: collectionsDef,
settings: settingsDef,
history: historyDef,
tabState: tabStateDef,
},
interceptors: {
default: "browser",
@@ -0,0 +1,36 @@
import {
runMutation,
runGQLQuery,
} from "@hoppscotch/common/helpers/backend/GQLClient"
import {
GetCurrentRestSessionDocument,
GetCurrentRestSessionQuery,
GetCurrentRestSessionQueryVariables,
SessionType,
UpdateUserSessionDocument,
UpdateUserSessionMutation,
UpdateUserSessionMutationVariables,
} from "../../api/generated/graphql"

export const updateUserSession = (
currentSession: string,
sessionType: SessionType
) =>
runMutation<
UpdateUserSessionMutation,
UpdateUserSessionMutationVariables,
""
>(UpdateUserSessionDocument, {
sessionType,
currentSession,
})()

export const getCurrentRestSession = () =>
runGQLQuery<
GetCurrentRestSessionQuery,
GetCurrentRestSessionQueryVariables,
""
>({
query: GetCurrentRestSessionDocument,
variables: {},
})

@@ -0,0 +1,38 @@
import { PersistableTabState } from "@hoppscotch/common/services/tab"
import { HoppRESTDocument } from "@hoppscotch/common/helpers/rest/document"
import { HoppUser } from "@hoppscotch/common/platform/auth"
import { TabStatePlatformDef } from "@hoppscotch/common/platform/tab"
import { def as platformAuth } from "@platform/auth/auth.platform"
import { getCurrentRestSession, updateUserSession } from "./tabState.api"
import { SessionType } from "../../api/generated/graphql"
import * as E from "fp-ts/Either"

async function writeCurrentTabState(
_: HoppUser,
persistableTabState: PersistableTabState<HoppRESTDocument>
) {
await updateUserSession(JSON.stringify(persistableTabState), SessionType.Rest)
}

async function loadTabStateFromSync(): Promise<PersistableTabState<HoppRESTDocument> | null> {
const currentUser = platformAuth.getCurrentUser()

if (!currentUser)
throw new Error("Cannot load request from sync without login")

const res = await getCurrentRestSession()

if (E.isRight(res)) {
const currentRESTSession = res.right.me.currentRESTSession

return currentRESTSession ? JSON.parse(currentRESTSession) : null
} else {
}

return null
}

export const def: TabStatePlatformDef = {
loadTabStateFromSync,
writeCurrentTabState,
}
@@ -156,11 +156,11 @@
"self_host_docs": "Self Host Documentation",
"send_magic_link": "Send magic link",
"setup_failure": "Setup has failed!!",
"setup_success": "Setup completed successfully!!",
"setup_success": "Setup completed successful!!",
"sign_in_agreement": "By signing in, you are agreeing to our",
"sign_in_options": "All sign in option",
"sign_out": "Sign out",
"team_name_too_short": "Workspace name should be atleast 6 characters long!!",
"team_name_long": "Workspace name should be atleast 6 characters long!!",
"user_not_found": "User not found in the infra!!"
},
"teams": {

@@ -17,13 +17,19 @@ declare module '@vue/runtime-core' {
HoppButtonPrimary: typeof import('@hoppscotch/ui')['HoppButtonPrimary']
HoppButtonSecondary: typeof import('@hoppscotch/ui')['HoppButtonSecondary']
HoppSmartAnchor: typeof import('@hoppscotch/ui')['HoppSmartAnchor']
HoppSmartAutoComplete: typeof import('@hoppscotch/ui')['HoppSmartAutoComplete']
HoppSmartConfirmModal: typeof import('@hoppscotch/ui')['HoppSmartConfirmModal']
HoppSmartInput: typeof import('@hoppscotch/ui')['HoppSmartInput']
HoppSmartItem: typeof import('@hoppscotch/ui')['HoppSmartItem']
HoppSmartLink: typeof import('@hoppscotch/ui')['HoppSmartLink']
HoppSmartModal: typeof import('@hoppscotch/ui')['HoppSmartModal']
HoppSmartPicture: typeof import('@hoppscotch/ui')['HoppSmartPicture']
HoppSmartSpinner: typeof import('@hoppscotch/ui')['HoppSmartSpinner']
HoppSmartTab: typeof import('@hoppscotch/ui')['HoppSmartTab']
HoppSmartTable: typeof import('@hoppscotch/ui')['HoppSmartTable']
HoppSmartTabs: typeof import('@hoppscotch/ui')['HoppSmartTabs']
HoppSmartToggle: typeof import('@hoppscotch/ui')['HoppSmartToggle']
IconLucideChevronDown: typeof import('~icons/lucide/chevron-down')['default']
IconLucideInbox: typeof import('~icons/lucide/inbox')['default']
SettingsAuthProvider: typeof import('./components/settings/AuthProvider.vue')['default']
SettingsConfigurations: typeof import('./components/settings/Configurations.vue')['default']
@@ -7,30 +7,33 @@
</p>
</div>

<div class="mx-8 md:col-span-2">
<h4 class="font-semibold text-secondaryDark">
{{ t('configs.data_sharing.title') }}
</h4>
<div class="mt-5 mx-8 md:col-span-2">
<section v-if="dataSharingConfigs">
<h4 class="font-semibold text-secondaryDark">
{{ t('configs.data_sharing.title') }}
</h4>

<div class="flex items-center space-y-4 py-4">
<HoppSmartToggle
:on="dataSharingConfigs.enabled"
@change="dataSharingConfigs.enabled = !dataSharingConfigs.enabled"
>
{{ t('configs.data_sharing.toggle_description') }}
</HoppSmartToggle>
</div>
<div class="space-y-4 py-4">
<div class="flex items-center">
<HoppSmartToggle
:on="dataSharingConfigs.enabled"
@change="dataSharingConfigs.enabled = !dataSharingConfigs.enabled"
>
{{ t('configs.data_sharing.toggle_description') }}
</HoppSmartToggle>
</div>
</div>

<!-- TODO: Update the link below -->
<HoppButtonSecondary
outline
filled
:icon="IconShieldQuestion"
:label="t('configs.data_sharing.see_shared')"
to="http://docs.hoppscotch.io"
blank
class="w-min my-2"
/>
<HoppButtonSecondary
outline
filled
:icon="IconShieldQuestion"
:label="t('configs.data_sharing.see_shared')"
blank
to="http://docs.hoppscotch.io"
class="w-min my-2"
/>
</section>
</div>
</div>
</template>

@@ -89,11 +89,11 @@ onMounted(async () => {
);
if (!authResult) return;

const dataSharingResult = await updateDataSharingConfigs(
const dataSharingReult = await updateDataSharingConfigs(
toggleDataSharingMutation
);

if (!dataSharingResult) return;
if (!dataSharingReult) return;
}

restart.value = true;
@@ -4,31 +4,26 @@
>
<div class="flex items-center justify-center flex-col space-y-2">
<h2 class="text-lg">{{ t('data_sharing.welcome') }}</h2>
<img
src="/assets/images/hoppscotch-title.svg"
alt="hoppscotch-title"
class="w-52"
/>
<img src="/assets/images/hoppscotch-title.svg" alt="" class="w-52" />
</div>
<div
class="bg-primaryLight p-10 border-2 border-dividerLight rounded-lg flex flex-col space-y-8"
>
<div class="flex flex-col space-y-5 items-start">
<div>
<span>
<p class="text-lg font-bold text-white">
{{ t('data_sharing.title') }}
</p>
<p class="font-light">
{{ t('data_sharing.description') }}
</p>
</div>
</span>
<HoppSmartToggle
:on="dataSharingToggle"
@change="dataSharingToggle = !dataSharingToggle"
>
{{ t('data_sharing.toggle_description') }}
</HoppSmartToggle>
<!-- TODO: Update link -->
<HoppSmartAnchor
blank
to="http://docs.hoppscotch.io"

@@ -37,12 +32,12 @@
/>
</div>
<div class="flex flex-col items-start space-y-5">
<div>
<span>
<p class="text-lg font-bold text-white">
{{ t('newsletter.title') }}
</p>
<p class="font-light">{{ t('newsletter.description') }}</p>
</div>
</span>
<HoppSmartToggle
:on="newsletterToggle"
@change="newsletterToggle = !newsletterToggle"

@@ -50,7 +45,7 @@
{{ t('newsletter.toggle_description') }}
</HoppSmartToggle>
</div>
<div class="flex flex-col items-center space-y-5">
<div class="flex flex-col space-y-5">
<HoppButtonPrimary
:icon="IconLogIn"
:label="t('app.continue_to_dashboard')"

@@ -76,8 +71,8 @@ import { useToast } from '~/composables/toast';
import { auth } from '~/helpers/auth';
import { listmonkApi } from '~/helpers/axiosConfig';
import {
ServiceStatus,
ToggleAnalyticsCollectionDocument,
ToggleAnalyticsCollectionMutationVariables,
} from '~/helpers/backend/graphql';
import IconBookOpenText from '~icons/lucide/book-open-text';
import IconLogIn from '~icons/lucide/log-in';

@@ -87,7 +82,7 @@
const toast = useToast();
const user = auth.getCurrentUser();

const emit = defineEmits<{
(event: 'setupComplete', status: boolean): void;
(event: 'onSetupComplete', status: boolean): void;
}>();

const dataSharingToggle = ref(true);

@@ -97,11 +92,12 @@ const newsletterToggle = ref(true);
const dataSharingMutation = useMutation(ToggleAnalyticsCollectionDocument);

const toggleDataSharing = async () => {
const status = dataSharingToggle.value
? ServiceStatus.Enable
: ServiceStatus.Disable;
const status = dataSharingToggle.value ? 'ENABLE' : 'DISABLE';

const result = await dataSharingMutation.executeMutation({ status });
const variables = { status };
const result = await dataSharingMutation.executeMutation(
variables as ToggleAnalyticsCollectionMutationVariables
);

if (result.error) {
toast.error(t('state.data_sharing_failure'));

@@ -129,13 +125,12 @@ const toggleNewsletter = async () => {

// Submit selections made
const submitSelection = async () => {
const dataSharingResult =
dataSharingToggle.value && (await toggleDataSharing());
const dataSharingResult = await toggleDataSharing();
const newsletterResult = newsletterToggle.value && (await toggleNewsletter());

const setupDataComplete = !dataSharingToggle.value || dataSharingResult;
const setupNewsletterComplete = !newsletterToggle.value || newsletterResult;

emit('setupComplete', setupDataComplete && setupNewsletterComplete);
emit('onSetupComplete', setupDataComplete && setupNewsletterComplete);
};
</script>
@@ -25,26 +25,22 @@ export function useClientHandler<

const fetchData = async () => {
fetching.value = true;
try {
const result = await client
.query(query, {
...variables,
})
.toPromise();

const result = await client
.query(query, {
...variables,
})
.toPromise();

if (result.error) {
if (getList) {
const resultList = getList(result.data!);
dataAsList.value.push(...resultList);
} else {
data.value = result.data;
}
} catch (e) {
error.value = true;
fetching.value = false;
return;
}

if (getList) {
const resultList = getList(result.data!);
dataAsList.value.push(...resultList);
} else {
data.value = result.data;
}

fetching.value = false;
};

@@ -281,16 +281,16 @@ export function useConfigHandler(updatedConfigs?: Config) {
};

const AreAnyConfigFieldsEmpty = (config: Config): boolean => {
const sections: Array<ConfigSection> = [
config.providers.github,
config.providers.google,
config.providers.microsoft,
config.mailConfigs,
const sections: Array<[ConfigSection, boolean]> = [
[config.providers.github, config.providers.github.enabled],
[config.providers.google, config.providers.google.enabled],
[config.providers.microsoft, config.providers.microsoft.enabled],
[config.mailConfigs, config.mailConfigs.enabled],
];

return sections.some(
(section) =>
section.enabled && Object.values(section.fields).some(isFieldEmpty)
([section, enabled]) =>
enabled && Object.values(section.fields).some(isFieldEmpty)
);
};

@@ -335,7 +335,7 @@ export function useConfigHandler(updatedConfigs?: Config) {
};

// Updating the auth provider configurations
const updateAuthProvider = (
const updateAuthProvider = async (
updateProviderStatus: UseMutationResponse<EnableAndDisableSsoMutation>
) =>
executeMutation(

@@ -348,7 +348,7 @@ export function useConfigHandler(updatedConfigs?: Config) {
);

// Updating the infra configurations
const updateInfraConfigs = (
const updateInfraConfigs = async (
updateInfraConfigsMutation: UseMutationResponse<UpdateInfraConfigsMutation>
) =>
executeMutation(

@@ -360,7 +360,7 @@ export function useConfigHandler(updatedConfigs?: Config) {
);

// Resetting the infra configurations
const resetInfraConfigs = (
const resetInfraConfigs = async (
resetInfraConfigsMutation: UseMutationResponse<ResetInfraConfigsMutation>
) =>
executeMutation(

@@ -370,7 +370,7 @@ export function useConfigHandler(updatedConfigs?: Config) {
);

// Updating the data sharing configurations
const updateDataSharingConfigs = (
const updateDataSharingConfigs = async (
toggleDataSharingMutation: UseMutationResponse<ToggleAnalyticsCollectionMutation>
) =>
executeMutation(
@@ -1,4 +1,4 @@
|
||||
import { Ref, onMounted, ref } from 'vue';
|
||||
import { onMounted, ref } from 'vue';
|
||||
import { DocumentNode } from 'graphql';
|
||||
import { TypedDocumentNode, useClientHandle } from '@urql/vue';
|
||||
|
||||
@@ -16,41 +16,38 @@ export function usePagedQuery<
|
||||
const { client } = useClientHandle();
|
||||
const fetching = ref(true);
|
||||
const error = ref(false);
|
||||
const list: Ref<ListItem[]> = ref([]);
|
||||
const list = ref<ListItem[]>([]);
|
||||
const currentPage = ref(0);
|
||||
const hasNextPage = ref(true);
|
||||
|
||||
const fetchNextPage = async () => {
|
||||
fetching.value = true;
|
||||
|
||||
const cursor =
|
||||
list.value.length > 0 ? getCursor(list.value.at(-1)!) : undefined;
|
||||
const variablesForPagination = {
|
||||
...variables,
|
||||
take: itemsPerPage,
|
||||
cursor,
|
||||
};
|
||||
try {
|
||||
const cursor =
|
||||
list.value.length > 0 ? getCursor(list.value.at(-1)) : undefined;
|
||||
const variablesForPagination = {
|
||||
...variables,
|
||||
take: itemsPerPage,
|
||||
cursor,
|
||||
};
|
||||
|
||||
const result = await client
|
||||
.query(query, variablesForPagination)
|
||||
.toPromise();
|
||||
const result = await client
|
||||
.query(query, variablesForPagination)
|
||||
.toPromise();
|
||||
const resultList = getList(result.data!);
|
||||
|
||||
if (result.error) {
|
||||
if (resultList.length < itemsPerPage) {
|
||||
hasNextPage.value = false;
|
||||
}
|
||||
|
||||
list.value.push(...resultList);
|
||||
currentPage.value++;
|
||||
} catch (e) {
|
||||
error.value = true;
|
||||
} finally {
|
||||
fetching.value = false;
|
||||
return;
|
||||
}
|
||||
|
||||
const resultList = getList(result.data!);
|
||||
|
||||
if (resultList.length < itemsPerPage) {
|
||||
hasNextPage.value = false;
|
||||
}
|
||||
|
||||
list.value.push(...resultList);
|
||||
currentPage.value++;
|
||||
|
||||
fetching.value = false;
|
||||
};
|
||||
|
||||
onMounted(async () => {
|
||||
|
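As a standalone illustration of the try/catch/finally flow the hunk above adopts, here is a simplified cursor-pagination sketch. The fetch function, item shape, and return value are placeholders; this is not the actual usePagedQuery signature.

import { ref } from 'vue';

// Placeholder types; the real composable is generic over these.
type Item = { id: string };
type FetchPage = (vars: { take: number; cursor?: string }) => Promise<Item[]>;

export function useSimplePagedList(fetchPage: FetchPage, itemsPerPage = 10) {
  const fetching = ref(true);
  const error = ref(false);
  const list = ref<Item[]>([]);
  const hasNextPage = ref(true);

  const fetchNextPage = async () => {
    fetching.value = true;
    try {
      // The cursor is the id of the last item already loaded, if any.
      const cursor = list.value.at(-1)?.id;
      const page = await fetchPage({ take: itemsPerPage, cursor });

      // A short page means there is nothing more to fetch.
      if (page.length < itemsPerPage) hasNextPage.value = false;
      list.value.push(...page);
    } catch {
      error.value = true;
    } finally {
      // Runs on success and failure alike, so the spinner always stops.
      fetching.value = false;
    }
  };

  return { fetching, error, list, hasNextPage, fetchNextPage };
}
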
@@ -67,7 +67,7 @@ const signOut = async (reloadWindow = false) => {
  });
};

const getUserDetails = async () => {
const getInitialUserDetails = async () => {
  const res = await authQuery.getUserDetails();
  return res.data;
};
@@ -80,7 +80,7 @@ const setUser = (user: HoppUser | null) => {

const setInitialUser = async () => {
  isGettingInitialUser.value = true;
  const res = await getUserDetails();
  const res = await getInitialUserDetails();

  if (res.errors?.[0]) {
    const [error] = res.errors;
@@ -154,7 +154,15 @@ export const auth = {
  getCurrentUserStream: () => currentUser$,
  getAuthEventsStream: () => authEvents$,
  getCurrentUser: () => currentUser$.value,
  getUserDetails,
  checkCurrentUser: async () => {
    try {
      const res = await authQuery.getUserDetails();
      return res.data.data.me;
    } catch (err) {
      return null;
    }
  },

  performAuthInit: () => {
    const currentUser = JSON.parse(getLocalConfig('login_state') ?? 'null');
    currentUser$.next(currentUser);

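The double .data access in checkCurrentUser above suggests an HTTP response that wraps a GraphQL body. A hedged sketch of the assumed shape (not taken from this PR; only isAdmin is read by the router guard that follows):

// Assumed shapes, for illustration only.
type MeUser = { isAdmin: boolean };
type GraphQLBody = { data: { me: MeUser | null } };
type HttpResponse = { data: GraphQLBody }; // e.g. an axios-style response

// Mirrors the query-then-unwrap step from the hunk above.
const readCurrentUser = (res: HttpResponse): MeUser | null => res.data.data.me;
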
@@ -1,11 +1,15 @@
import { auth } from '~/helpers/auth';
import { UNAUTHORIZED } from '~/helpers/errors';
import { HoppModule } from '.';

const isSetupRoute = (to: unknown) => to === 'setup';

const isGuestRoute = (to: unknown) => ['index', 'enter'].includes(to as string);

const getAdminStatus = async () => {
  const user = await auth.checkCurrentUser();
  return !!user?.isAdmin;
};

const getFirstTimeInfraSetupStatus = async () => {
  const isInfraNotSetup = await auth.getFirstTimeInfraSetupStatus();
  return isInfraNotSetup;
@@ -26,15 +30,12 @@ const getFirstTimeInfraSetupStatus = async () => {

export default <HoppModule>{
  async onBeforeRouteChange(to, _from, next) {
    const res = await auth.getUserDetails();
    const isAdmin = await getAdminStatus();

    // Allow performing the silent refresh flow for an invalid access token state
    if (res.errors?.[0].message === UNAUTHORIZED) {
      return next();
    if (!isAdmin) {
      auth.signOutUser();
    }

    const isAdmin = res.data?.me.isAdmin;

    // Route Guards
    if (!isGuestRoute(to.name) && !isAdmin) {
      /**
@@ -45,7 +46,7 @@ export default <HoppModule>{
    }

    if (isAdmin) {
      // These route guards applies to the case where the user is logged in successfully and validated as an admin
      // This block applies to the case where the user is logged in successfully and validated as an admin
      const isInfraNotSetup = await getFirstTimeInfraSetupStatus();

      /**
@@ -56,6 +57,7 @@ export default <HoppModule>{
        const name = isInfraNotSetup ? 'setup' : 'dashboard';
        return next({ name });
      }

      /**
       * Reroutes the user to the dashboard homepage if they have setup the infra already
       * and are trying to access the setup page
@@ -63,10 +65,12 @@ export default <HoppModule>{
      if (isSetupRoute(to.name) && !isInfraNotSetup) {
        return next({ name: 'dashboard' });
      }

      /**
       * Reroutes the user to the setup page if they have not setup the infra yet
       * and tries to access a valid route which is not a guest route
       */

      if (isInfraNotSetup && !isSetupRoute(to.name)) {
        return next({ name: 'setup' });
      }

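The two reroute rules visible at the end of the guard above can be read as one small decision. The helper below is an illustrative condensation, not code from the PR; the route names come from the diff, everything else is assumed.

// Illustrative condensation of the two visible reroute rules.
const resolveSetupRedirect = (
  routeName: string | undefined,
  isInfraNotSetup: boolean
): { name: string } | undefined => {
  // Infra is already set up, but the setup wizard was requested.
  if (routeName === 'setup' && !isInfraNotSetup) return { name: 'dashboard' };
  // Infra is not set up yet, and a non-setup page was requested.
  if (isInfraNotSetup && routeName !== 'setup') return { name: 'setup' };
  return undefined; // no redirect, the guard falls through to next()
};
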
@@ -1,6 +1,6 @@
<template>
  <SetupDataSharingAndNewsletter
    @setup-complete="(status: boolean) => (isDataSharingAndNewsletterSetup = status)"
    @onSetupComplete="(status: boolean) => (isDataSharingAndNewsletterSetup = status)"
  />
</template>


@@ -4,9 +4,7 @@
      <HoppSmartSpinner />
    </div>

    <div v-else-if="error" class="text-lg">
      {{ t('teams.load_info_error') }}
    </div>
    <div v-else-if="error">{{ t('teams.load_info_error') }}</div>

    <div v-else-if="team" class="flex flex-col">
      <div class="flex items-center space-x-4">

@@ -1,6 +1,6 @@
<template>
  <div v-if="fetching" class="flex justify-center"><HoppSmartSpinner /></div>
  <div v-else-if="error" class="text-lg">{{ t('users.load_info_error') }}</div>
  <div v-else-if="error">{{ t('users.load_info_error') }}</div>
  <div v-else-if="user" class="flex flex-col space-y-4">
    <div class="flex gap-x-3">
      <button

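The listener rename in the first template hunk above implies that the child component emits a camelCase event. A hedged sketch of what the matching declaration inside SetupDataSharingAndNewsletter might look like (assumed, not shown in this diff):

// Inside the child's <script setup lang="ts">; illustrative only.
const emit = defineEmits<{
  (e: 'onSetupComplete', status: boolean): void;
}>();

// Called once the data sharing and newsletter step finishes.
emit('onSetupComplete', true);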