Compare commits


3 Commits

Author   SHA1        Message                                         Date
Gusram   2c0805fafe  docs(desktop): update building instruction      2024-05-31 01:41:06 +08:00
Gusram   26b4f64824  chore(desktop): update dependencies version     2024-05-31 01:39:36 +08:00
Gusram   4156551b24  feat(desktop): implement backend wrapper auth   2024-05-31 00:39:19 +08:00
50 changed files with 501 additions and 1492 deletions

View File

@@ -47,6 +47,7 @@ RATE_LIMIT_MAX=100 # Max requests per IP
# Base URLs
VITE_BACKEND_LOGIN_API_URL=http://localhost:5444
VITE_BASE_URL=http://localhost:3000
VITE_SHORTCODE_BASE_URL=http://localhost:3000
VITE_ADMIN_URL=http://localhost:3100
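
The VITE_-prefixed values in this block are build-time variables exposed to the frontend; a minimal sketch of reading one of them in client code, assuming standard Vite behavior (only VITE_-prefixed variables are available on import.meta.env):

// Sketch only: reads the backend login API base URL in frontend code.
// Assumes Vite's import.meta.env typing (vite/client) is available.
const loginApiBase: string =
  import.meta.env.VITE_BACKEND_LOGIN_API_URL ?? "http://localhost:5444";
console.log(`Authenticating against ${loginApiBase}`);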

View File

@@ -118,11 +118,11 @@ services:
restart: always
environment:
# Edit the below line to match your PostgresDB URL if you have an outside DB (make sure to update the .env file as well)
# - DATABASE_URL=postgresql://postgres:testpass@hoppscotch-db:5432/hoppscotch?connect_timeout=300
- DATABASE_URL=postgresql://postgres:testpass@hoppscotch-db:5432/hoppscotch?connect_timeout=300
- PORT=3000
volumes:
# Uncomment the line below when modifying code. Only applicable when using the "dev" target.
- ./packages/hoppscotch-backend/:/usr/src/app
# - ./packages/hoppscotch-backend/:/usr/src/app
- /usr/src/app/node_modules/
depends_on:
hoppscotch-db:

View File

@@ -1,2 +0,0 @@
-- AlterTable
ALTER TABLE "User" ADD COLUMN "lastLoggedOn" TIMESTAMP(3);

View File

@@ -1,19 +0,0 @@
-- CreateTable
CREATE TABLE "PersonalAccessToken" (
"id" TEXT NOT NULL,
"userUid" TEXT NOT NULL,
"label" TEXT NOT NULL,
"token" TEXT NOT NULL,
"expiresOn" TIMESTAMP(3),
"createdOn" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedOn" TIMESTAMP(3) NOT NULL,
CONSTRAINT "PersonalAccessToken_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "PersonalAccessToken_token_key" ON "PersonalAccessToken"("token");
-- AddForeignKey
ALTER TABLE "PersonalAccessToken" ADD CONSTRAINT "PersonalAccessToken_userUid_fkey" FOREIGN KEY ("userUid") REFERENCES "User"("uid") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -41,31 +41,31 @@ model TeamInvitation {
}
model TeamCollection {
id String @id @default(cuid())
id String @id @default(cuid())
parentID String?
data Json?
parent TeamCollection? @relation("TeamCollectionChildParent", fields: [parentID], references: [id])
children TeamCollection[] @relation("TeamCollectionChildParent")
parent TeamCollection? @relation("TeamCollectionChildParent", fields: [parentID], references: [id])
children TeamCollection[] @relation("TeamCollectionChildParent")
requests TeamRequest[]
teamID String
team Team @relation(fields: [teamID], references: [id], onDelete: Cascade)
team Team @relation(fields: [teamID], references: [id], onDelete: Cascade)
title String
orderIndex Int
createdOn DateTime @default(now()) @db.Timestamp(3)
updatedOn DateTime @updatedAt @db.Timestamp(3)
createdOn DateTime @default(now()) @db.Timestamp(3)
updatedOn DateTime @updatedAt @db.Timestamp(3)
}
model TeamRequest {
id String @id @default(cuid())
id String @id @default(cuid())
collectionID String
collection TeamCollection @relation(fields: [collectionID], references: [id], onDelete: Cascade)
collection TeamCollection @relation(fields: [collectionID], references: [id], onDelete: Cascade)
teamID String
team Team @relation(fields: [teamID], references: [id], onDelete: Cascade)
team Team @relation(fields: [teamID], references: [id], onDelete: Cascade)
title String
request Json
orderIndex Int
createdOn DateTime @default(now()) @db.Timestamp(3)
updatedOn DateTime @updatedAt @db.Timestamp(3)
createdOn DateTime @default(now()) @db.Timestamp(3)
updatedOn DateTime @updatedAt @db.Timestamp(3)
}
model Shortcode {
@@ -89,26 +89,24 @@ model TeamEnvironment {
}
model User {
uid String @id @default(cuid())
displayName String?
email String? @unique
photoURL String?
isAdmin Boolean @default(false)
refreshToken String?
providerAccounts Account[]
VerificationToken VerificationToken[]
settings UserSettings?
UserHistory UserHistory[]
UserEnvironments UserEnvironment[]
userCollections UserCollection[]
userRequests UserRequest[]
currentRESTSession Json?
currentGQLSession Json?
lastLoggedOn DateTime?
createdOn DateTime @default(now()) @db.Timestamp(3)
invitedUsers InvitedUsers[]
shortcodes Shortcode[]
personalAccessTokens PersonalAccessToken[]
uid String @id @default(cuid())
displayName String?
email String? @unique
photoURL String?
isAdmin Boolean @default(false)
refreshToken String?
providerAccounts Account[]
VerificationToken VerificationToken[]
settings UserSettings?
UserHistory UserHistory[]
UserEnvironments UserEnvironment[]
userCollections UserCollection[]
userRequests UserRequest[]
currentRESTSession Json?
currentGQLSession Json?
createdOn DateTime @default(now()) @db.Timestamp(3)
invitedUsers InvitedUsers[]
shortcodes Shortcode[]
}
model Account {
@@ -220,14 +218,3 @@ model InfraConfig {
createdOn DateTime @default(now()) @db.Timestamp(3)
updatedOn DateTime @updatedAt @db.Timestamp(3)
}
model PersonalAccessToken {
id String @id @default(cuid())
userUid String
user User @relation(fields: [userUid], references: [uid], onDelete: Cascade)
label String
token String @unique @default(uuid())
expiresOn DateTime? @db.Timestamp(3)
createdOn DateTime @default(now()) @db.Timestamp(3)
updatedOn DateTime @updatedAt @db.Timestamp(3)
}
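
The token column above defaults to a random UUID, while clients receive a prefixed value; a minimal sketch of that round trip, mirroring the TOKEN_PREFIX and extractUUID logic in the AccessTokenService diff further down (the UUID is the sample value used in the spec file):

// Stored vs. client-facing token format, per AccessTokenService below.
const TOKEN_PREFIX = "pat-";
const storedToken = "0140e328-b187-4823-ae4b-ed4bec832ac2"; // PersonalAccessToken.token (uuid)
const clientToken = `${TOKEN_PREFIX}${storedToken}`;        // value returned on creation

// Strip the prefix again before a database lookup:
const extractUUID = (token: string): string | null =>
  token.startsWith(TOKEN_PREFIX) ? token.slice(TOKEN_PREFIX.length) : null;
console.log(extractUUID(clientToken)); // "0140e328-b187-4823-ae4b-ed4bec832ac2"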

View File

@@ -1,107 +0,0 @@
import {
BadRequestException,
Body,
Controller,
Delete,
Get,
HttpStatus,
Param,
ParseIntPipe,
Post,
Query,
UseGuards,
UseInterceptors,
} from '@nestjs/common';
import { AccessTokenService } from './access-token.service';
import { CreateAccessTokenDto } from './dto/create-access-token.dto';
import { JwtAuthGuard } from 'src/auth/guards/jwt-auth.guard';
import * as E from 'fp-ts/Either';
import { throwHTTPErr } from 'src/utils';
import { GqlUser } from 'src/decorators/gql-user.decorator';
import { AuthUser } from 'src/types/AuthUser';
import { ThrottlerBehindProxyGuard } from 'src/guards/throttler-behind-proxy.guard';
import { PATAuthGuard } from 'src/guards/rest-pat-auth.guard';
import { AccessTokenInterceptor } from 'src/interceptors/access-token.interceptor';
import { TeamEnvironmentsService } from 'src/team-environments/team-environments.service';
import { TeamCollectionService } from 'src/team-collection/team-collection.service';
import { ACCESS_TOKENS_INVALID_DATA_ID } from 'src/errors';
import { createCLIErrorResponse } from './helper';
@UseGuards(ThrottlerBehindProxyGuard)
@Controller({ path: 'access-tokens', version: '1' })
export class AccessTokenController {
constructor(
private readonly accessTokenService: AccessTokenService,
private readonly teamCollectionService: TeamCollectionService,
private readonly teamEnvironmentsService: TeamEnvironmentsService,
) {}
@Post('create')
@UseGuards(JwtAuthGuard)
async createPAT(
@GqlUser() user: AuthUser,
@Body() createAccessTokenDto: CreateAccessTokenDto,
) {
const result = await this.accessTokenService.createPAT(
createAccessTokenDto,
user,
);
if (E.isLeft(result)) throwHTTPErr(result.left);
return result.right;
}
@Delete('revoke')
@UseGuards(JwtAuthGuard)
async deletePAT(@Query('id') id: string) {
const result = await this.accessTokenService.deletePAT(id);
if (E.isLeft(result)) throwHTTPErr(result.left);
return result.right;
}
@Get('list')
@UseGuards(JwtAuthGuard)
async listAllUserPAT(
@GqlUser() user: AuthUser,
@Query('offset', ParseIntPipe) offset: number,
@Query('limit', ParseIntPipe) limit: number,
) {
return await this.accessTokenService.listAllUserPAT(
user.uid,
offset,
limit,
);
}
@Get('collection/:id')
@UseGuards(PATAuthGuard)
@UseInterceptors(AccessTokenInterceptor)
async fetchCollection(@GqlUser() user: AuthUser, @Param('id') id: string) {
const res = await this.teamCollectionService.getCollectionForCLI(
id,
user.uid,
);
if (E.isLeft(res))
throw new BadRequestException(
createCLIErrorResponse(ACCESS_TOKENS_INVALID_DATA_ID),
);
return res.right;
}
@Get('environment/:id')
@UseGuards(PATAuthGuard)
@UseInterceptors(AccessTokenInterceptor)
async fetchEnvironment(@GqlUser() user: AuthUser, @Param('id') id: string) {
const res = await this.teamEnvironmentsService.getTeamEnvironmentForCLI(
id,
user.uid,
);
if (E.isLeft(res))
throw new BadRequestException(
createCLIErrorResponse(ACCESS_TOKENS_INVALID_DATA_ID),
);
return res.right;
}
}
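
A minimal sketch of how a CLI-style client could call the guarded routes above, assuming the versioned controller resolves to /v1/access-tokens (the same prefix the CLI's env parser builds further down) and reusing the staging server URL from the skipped e2e suite; the PAT value is a placeholder:

// Illustrative client call; SERVER_URL and PAT are placeholders.
// PATAuthGuard reads the token from a standard Bearer Authorization header.
const SERVER_URL = "https://stage-shc.hoppscotch.io/backend";
const PAT = "pat-0140e328-b187-4823-ae4b-ed4bec832ac2";

async function fetchCollectionForCLI(collectionID: string) {
  const res = await fetch(`${SERVER_URL}/v1/access-tokens/collection/${collectionID}`, {
    headers: { Authorization: `Bearer ${PAT}` },
  });
  if (!res.ok) {
    // Error bodies follow CLIErrorResponse, e.g. { reason: "TOKEN_INVALID" }
    throw new Error((await res.json()).reason);
  }
  return res.json(); // GetCollectionResponse-shaped payload
}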

View File

@@ -1,20 +0,0 @@
import { Module } from '@nestjs/common';
import { AccessTokenController } from './access-token.controller';
import { PrismaModule } from 'src/prisma/prisma.module';
import { AccessTokenService } from './access-token.service';
import { TeamCollectionModule } from 'src/team-collection/team-collection.module';
import { TeamEnvironmentsModule } from 'src/team-environments/team-environments.module';
import { TeamModule } from 'src/team/team.module';
@Module({
imports: [
PrismaModule,
TeamCollectionModule,
TeamEnvironmentsModule,
TeamModule,
],
controllers: [AccessTokenController],
providers: [AccessTokenService],
exports: [AccessTokenService],
})
export class AccessTokenModule {}

View File

@@ -1,195 +0,0 @@
import { AccessTokenService } from './access-token.service';
import { mockDeep, mockReset } from 'jest-mock-extended';
import { PrismaService } from 'src/prisma/prisma.service';
import {
ACCESS_TOKEN_EXPIRY_INVALID,
ACCESS_TOKEN_LABEL_SHORT,
ACCESS_TOKEN_NOT_FOUND,
} from 'src/errors';
import { AuthUser } from 'src/types/AuthUser';
import { PersonalAccessToken } from '@prisma/client';
import { AccessToken } from 'src/types/AccessToken';
import { HttpStatus } from '@nestjs/common';
const mockPrisma = mockDeep<PrismaService>();
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const accessTokenService = new AccessTokenService(mockPrisma);
const currentTime = new Date();
const user: AuthUser = {
uid: '123344',
email: 'dwight@dundermifflin.com',
displayName: 'Dwight Schrute',
photoURL: 'https://en.wikipedia.org/wiki/Dwight_Schrute',
isAdmin: false,
refreshToken: 'hbfvdkhjbvkdvdfjvbnkhjb',
createdOn: currentTime,
currentGQLSession: {},
currentRESTSession: {},
lastLoggedOn: currentTime,
};
const PATCreatedOn = new Date();
const expiryInDays = 7;
const PATExpiresOn = new Date(
PATCreatedOn.getTime() + expiryInDays * 24 * 60 * 60 * 1000,
);
const userAccessToken: PersonalAccessToken = {
id: 'skfvhj8uvdfivb',
userUid: user.uid,
label: 'test',
token: '0140e328-b187-4823-ae4b-ed4bec832ac2',
expiresOn: PATExpiresOn,
createdOn: PATCreatedOn,
updatedOn: new Date(),
};
const userAccessTokenCasted: AccessToken = {
id: userAccessToken.id,
label: userAccessToken.label,
createdOn: userAccessToken.createdOn,
lastUsedOn: userAccessToken.updatedOn,
expiresOn: userAccessToken.expiresOn,
};
beforeEach(() => {
mockReset(mockPrisma);
});
describe('AccessTokenService', () => {
describe('createPAT', () => {
test('should throw ACCESS_TOKEN_LABEL_SHORT if label is too short', async () => {
const result = await accessTokenService.createPAT(
{
label: 'a',
expiryInDays: 7,
},
user,
);
expect(result).toEqualLeft({
message: ACCESS_TOKEN_LABEL_SHORT,
statusCode: HttpStatus.BAD_REQUEST,
});
});
test('should throw ACCESS_TOKEN_EXPIRY_INVALID if expiry date is invalid', async () => {
const result = await accessTokenService.createPAT(
{
label: 'test',
expiryInDays: 9,
},
user,
);
expect(result).toEqualLeft({
message: ACCESS_TOKEN_EXPIRY_INVALID,
statusCode: HttpStatus.BAD_REQUEST,
});
});
test('should successfully create a new Access Token', async () => {
mockPrisma.personalAccessToken.create.mockResolvedValueOnce(
userAccessToken,
);
const result = await accessTokenService.createPAT(
{
label: userAccessToken.label,
expiryInDays,
},
user,
);
expect(result).toEqualRight({
token: `pat-${userAccessToken.token}`,
info: userAccessTokenCasted,
});
});
});
describe('deletePAT', () => {
test('should throw ACCESS_TOKEN_NOT_FOUND if Access Token is not found', async () => {
mockPrisma.personalAccessToken.delete.mockRejectedValueOnce(
'RecordNotFound',
);
const result = await accessTokenService.deletePAT(userAccessToken.id);
expect(result).toEqualLeft({
message: ACCESS_TOKEN_NOT_FOUND,
statusCode: HttpStatus.NOT_FOUND,
});
});
test('should successfully delete a new Access Token', async () => {
mockPrisma.personalAccessToken.delete.mockResolvedValueOnce(
userAccessToken,
);
const result = await accessTokenService.deletePAT(userAccessToken.id);
expect(result).toEqualRight(true);
});
});
describe('listAllUserPAT', () => {
test('should successfully return a list of user Access Tokens', async () => {
mockPrisma.personalAccessToken.findMany.mockResolvedValueOnce([
userAccessToken,
]);
const result = await accessTokenService.listAllUserPAT(user.uid, 0, 10);
expect(result).toEqual([userAccessTokenCasted]);
});
});
describe('getUserPAT', () => {
test('should throw ACCESS_TOKEN_NOT_FOUND if Access Token is not found', async () => {
mockPrisma.personalAccessToken.findUniqueOrThrow.mockRejectedValueOnce(
'NotFoundError',
);
const result = await accessTokenService.getUserPAT(userAccessToken.token);
expect(result).toEqualLeft(ACCESS_TOKEN_NOT_FOUND);
});
test('should successfully return a user Access Tokens', async () => {
mockPrisma.personalAccessToken.findUniqueOrThrow.mockResolvedValueOnce({
...userAccessToken,
user,
} as any);
const result = await accessTokenService.getUserPAT(
`pat-${userAccessToken.token}`,
);
expect(result).toEqualRight({
user,
...userAccessToken,
} as any);
});
});
describe('updateLastUsedforPAT', () => {
test('should throw ACCESS_TOKEN_NOT_FOUND if Access Token is not found', async () => {
mockPrisma.personalAccessToken.update.mockRejectedValueOnce(
'RecordNotFound',
);
const result = await accessTokenService.updateLastUsedForPAT(
userAccessToken.token,
);
expect(result).toEqualLeft(ACCESS_TOKEN_NOT_FOUND);
});
test('should successfully update lastUsedOn for a user Access Tokens', async () => {
mockPrisma.personalAccessToken.update.mockResolvedValueOnce(
userAccessToken,
);
const result = await accessTokenService.updateLastUsedForPAT(
`pat-${userAccessToken.token}`,
);
expect(result).toEqualRight(userAccessTokenCasted);
});
});
});

View File

@@ -1,203 +0,0 @@
import { HttpStatus, Injectable } from '@nestjs/common';
import { PrismaService } from 'src/prisma/prisma.service';
import { CreateAccessTokenDto } from './dto/create-access-token.dto';
import { AuthUser } from 'src/types/AuthUser';
import { isValidLength } from 'src/utils';
import * as E from 'fp-ts/Either';
import {
ACCESS_TOKEN_EXPIRY_INVALID,
ACCESS_TOKEN_LABEL_SHORT,
ACCESS_TOKEN_NOT_FOUND,
} from 'src/errors';
import { CreateAccessTokenResponse } from './helper';
import { PersonalAccessToken } from '@prisma/client';
import { AccessToken } from 'src/types/AccessToken';
@Injectable()
export class AccessTokenService {
constructor(private readonly prisma: PrismaService) {}
TITLE_LENGTH = 3;
VALID_TOKEN_DURATIONS = [7, 30, 60, 90];
TOKEN_PREFIX = 'pat-';
/**
* Calculate the expiration date of the token
*
* @param expiresOn Number of days the token is valid for
* @returns Date object of the expiration date
*/
private calculateExpirationDate(expiresOn: null | number) {
if (expiresOn === null) return null;
return new Date(Date.now() + expiresOn * 24 * 60 * 60 * 1000);
}
/**
* Validate the expiration date of the token
*
* @param expiresOn Number of days the token is valid for
* @returns Boolean indicating if the expiration date is valid
*/
private validateExpirationDate(expiresOn: null | number) {
if (expiresOn === null || this.VALID_TOKEN_DURATIONS.includes(expiresOn))
return true;
return false;
}
/**
* Typecast a database PersonalAccessToken to a AccessToken model
* @param token database PersonalAccessToken
* @returns AccessToken model
*/
private cast(token: PersonalAccessToken): AccessToken {
return <AccessToken>{
id: token.id,
label: token.label,
createdOn: token.createdOn,
expiresOn: token.expiresOn,
lastUsedOn: token.updatedOn,
};
}
/**
* Extract UUID from the token
*
* @param token Personal Access Token
* @returns UUID of the token
*/
private extractUUID(token): string | null {
if (!token.startsWith(this.TOKEN_PREFIX)) return null;
return token.slice(this.TOKEN_PREFIX.length);
}
/**
* Create a Personal Access Token
*
* @param createAccessTokenDto DTO for creating a Personal Access Token
* @param user AuthUser object
* @returns Either of the created token or error message
*/
async createPAT(createAccessTokenDto: CreateAccessTokenDto, user: AuthUser) {
const isTitleValid = isValidLength(
createAccessTokenDto.label,
this.TITLE_LENGTH,
);
if (!isTitleValid)
return E.left({
message: ACCESS_TOKEN_LABEL_SHORT,
statusCode: HttpStatus.BAD_REQUEST,
});
if (!this.validateExpirationDate(createAccessTokenDto.expiryInDays))
return E.left({
message: ACCESS_TOKEN_EXPIRY_INVALID,
statusCode: HttpStatus.BAD_REQUEST,
});
const createdPAT = await this.prisma.personalAccessToken.create({
data: {
userUid: user.uid,
label: createAccessTokenDto.label,
expiresOn: this.calculateExpirationDate(
createAccessTokenDto.expiryInDays,
),
},
});
const res: CreateAccessTokenResponse = {
token: `${this.TOKEN_PREFIX}${createdPAT.token}`,
info: this.cast(createdPAT),
};
return E.right(res);
}
/**
* Delete a Personal Access Token
*
* @param accessTokenID ID of the Personal Access Token
* @returns Either of true or error message
*/
async deletePAT(accessTokenID: string) {
try {
await this.prisma.personalAccessToken.delete({
where: { id: accessTokenID },
});
return E.right(true);
} catch {
return E.left({
message: ACCESS_TOKEN_NOT_FOUND,
statusCode: HttpStatus.NOT_FOUND,
});
}
}
/**
* List all Personal Access Tokens of a user
*
* @param userUid UID of the user
* @param offset Offset for pagination
* @param limit Limit for pagination
* @returns Either of the list of Personal Access Tokens or error message
*/
async listAllUserPAT(userUid: string, offset: number, limit: number) {
const userPATs = await this.prisma.personalAccessToken.findMany({
where: {
userUid: userUid,
},
skip: offset,
take: limit,
orderBy: {
createdOn: 'desc',
},
});
const userAccessTokenList = userPATs.map((pat) => this.cast(pat));
return userAccessTokenList;
}
/**
* Get a Personal Access Token
*
* @param accessToken Personal Access Token
* @returns Either of the Personal Access Token or error message
*/
async getUserPAT(accessToken: string) {
const extractedToken = this.extractUUID(accessToken);
if (!extractedToken) return E.left(ACCESS_TOKEN_NOT_FOUND);
try {
const userPAT = await this.prisma.personalAccessToken.findUniqueOrThrow({
where: { token: extractedToken },
include: { user: true },
});
return E.right(userPAT);
} catch {
return E.left(ACCESS_TOKEN_NOT_FOUND);
}
}
/**
* Update the last used date of a Personal Access Token
*
* @param token Personal Access Token
* @returns Either of the updated Personal Access Token or error message
*/
async updateLastUsedForPAT(token: string) {
const extractedToken = this.extractUUID(token);
if (!extractedToken) return E.left(ACCESS_TOKEN_NOT_FOUND);
try {
const updatedAccessToken = await this.prisma.personalAccessToken.update({
where: { token: extractedToken },
data: {
updatedOn: new Date(),
},
});
return E.right(this.cast(updatedAccessToken));
} catch {
return E.left(ACCESS_TOKEN_NOT_FOUND);
}
}
}
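
As a worked example of the validation above: expiryInDays must be null or one of the VALID_TOKEN_DURATIONS, and a non-null value is turned into an absolute expiry timestamp; a small sketch reusing the same arithmetic:

// Same checks and arithmetic as validateExpirationDate/calculateExpirationDate above.
const VALID_TOKEN_DURATIONS = [7, 30, 60, 90];
console.log(VALID_TOKEN_DURATIONS.includes(7)); // true
console.log(VALID_TOKEN_DURATIONS.includes(9)); // false -> ACCESS_TOKEN_EXPIRY_INVALID

// 7 days in milliseconds: 7 * 24 * 60 * 60 * 1000 = 604_800_000
const expiresOn = new Date(Date.now() + 7 * 24 * 60 * 60 * 1000);
console.log(expiresOn.toISOString());
// A null expiryInDays produces a null expiresOn, i.e. the token never expires.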

View File

@@ -1,5 +0,0 @@
// Inputs to create a new PAT
export class CreateAccessTokenDto {
label: string;
expiryInDays: number | null;
}
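
A request body for the create endpoint therefore looks like the sketch below (the label value is made up; expiryInDays must be null or one of the durations AccessTokenService accepts):

// Illustrative payload for POST /v1/access-tokens/create.
const createAccessTokenBody: CreateAccessTokenDto = {
  label: "ci-runner",  // at least 3 characters, else ACCESS_TOKEN_LABEL_SHORT
  expiryInDays: 30,    // null (never expires) or one of 7 | 30 | 60 | 90
};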

View File

@@ -1,17 +0,0 @@
import { AccessToken } from 'src/types/AccessToken';
// Response type of PAT creation method
export type CreateAccessTokenResponse = {
token: string;
info: AccessToken;
};
// Response type of any error in PAT module
export type CLIErrorResponse = {
reason: string;
};
// Return a CLIErrorResponse object
export function createCLIErrorResponse(reason: string): CLIErrorResponse {
return { reason };
}

View File

@@ -74,7 +74,6 @@ const dbAdminUsers: DbUser[] = [
refreshToken: 'refreshToken',
currentRESTSession: '',
currentGQLSession: '',
lastLoggedOn: new Date(),
createdOn: new Date(),
},
{
@@ -86,10 +85,20 @@ const dbAdminUsers: DbUser[] = [
refreshToken: 'refreshToken',
currentRESTSession: '',
currentGQLSession: '',
lastLoggedOn: new Date(),
createdOn: new Date(),
},
];
const dbNonAminUser: DbUser = {
uid: 'uid 3',
displayName: 'displayName',
email: 'email@email.com',
photoURL: 'photoURL',
isAdmin: false,
refreshToken: 'refreshToken',
currentRESTSession: '',
currentGQLSession: '',
createdOn: new Date(),
};
describe('AdminService', () => {
describe('fetchInvitedUsers', () => {

View File

@@ -27,7 +27,6 @@ import { MailerModule } from './mailer/mailer.module';
import { PosthogModule } from './posthog/posthog.module';
import { ScheduleModule } from '@nestjs/schedule';
import { HealthModule } from './health/health.module';
import { AccessTokenModule } from './access-token/access-token.module';
@Module({
imports: [
@@ -103,7 +102,6 @@ import { AccessTokenModule } from './access-token/access-token.module';
PosthogModule,
ScheduleModule.forRoot(),
HealthModule,
AccessTokenModule,
],
providers: [GQLComplexityPlugin],
controllers: [AppController],

View File

@@ -7,7 +7,6 @@ import {
Request,
Res,
UseGuards,
UseInterceptors,
} from '@nestjs/common';
import { AuthService } from './auth.service';
import { SignInMagicDto } from './dto/signin-magic.dto';
@@ -28,7 +27,6 @@ import { SkipThrottle } from '@nestjs/throttler';
import { AUTH_PROVIDER_NOT_SPECIFIED } from 'src/errors';
import { ConfigService } from '@nestjs/config';
import { throwHTTPErr } from 'src/utils';
import { UserLastLoginInterceptor } from 'src/interceptors/user-last-login.interceptor';
@UseGuards(ThrottlerBehindProxyGuard)
@Controller({ path: 'auth', version: '1' })
@@ -112,7 +110,6 @@ export class AuthController {
@Get('google/callback')
@SkipThrottle()
@UseGuards(GoogleSSOGuard)
@UseInterceptors(UserLastLoginInterceptor)
async googleAuthRedirect(@Request() req, @Res() res) {
const authTokens = await this.authService.generateAuthTokens(req.user.uid);
if (E.isLeft(authTokens)) throwHTTPErr(authTokens.left);
@@ -138,7 +135,6 @@ export class AuthController {
@Get('github/callback')
@SkipThrottle()
@UseGuards(GithubSSOGuard)
@UseInterceptors(UserLastLoginInterceptor)
async githubAuthRedirect(@Request() req, @Res() res) {
const authTokens = await this.authService.generateAuthTokens(req.user.uid);
if (E.isLeft(authTokens)) throwHTTPErr(authTokens.left);
@@ -164,7 +160,6 @@ export class AuthController {
@Get('microsoft/callback')
@SkipThrottle()
@UseGuards(MicrosoftSSOGuard)
@UseInterceptors(UserLastLoginInterceptor)
async microsoftAuthRedirect(@Request() req, @Res() res) {
const authTokens = await this.authService.generateAuthTokens(req.user.uid);
if (E.isLeft(authTokens)) throwHTTPErr(authTokens.left);

View File

@@ -51,7 +51,6 @@ const user: AuthUser = {
photoURL: 'https://en.wikipedia.org/wiki/Dwight_Schrute',
isAdmin: false,
refreshToken: 'hbfvdkhjbvkdvdfjvbnkhjb',
lastLoggedOn: currentTime,
createdOn: currentTime,
currentGQLSession: {},
currentRESTSession: {},
@@ -173,11 +172,9 @@ describe('verifyMagicLinkTokens', () => {
// generateAuthTokens
mockJWT.sign.mockReturnValue(user.refreshToken);
// UpdateUserRefreshToken
mockUser.updateUserRefreshToken.mockResolvedValueOnce(E.right(user));
mockUser.UpdateUserRefreshToken.mockResolvedValueOnce(E.right(user));
// deletePasswordlessVerificationToken
mockPrisma.verificationToken.delete.mockResolvedValueOnce(passwordlessData);
// usersService.updateUserLastLoggedOn
mockUser.updateUserLastLoggedOn.mockResolvedValue(E.right(true));
const result = await authService.verifyMagicLinkTokens(magicLinkVerify);
expect(result).toEqualRight({
@@ -200,11 +197,9 @@ describe('verifyMagicLinkTokens', () => {
// generateAuthTokens
mockJWT.sign.mockReturnValue(user.refreshToken);
// UpdateUserRefreshToken
mockUser.updateUserRefreshToken.mockResolvedValueOnce(E.right(user));
mockUser.UpdateUserRefreshToken.mockResolvedValueOnce(E.right(user));
// deletePasswordlessVerificationToken
mockPrisma.verificationToken.delete.mockResolvedValueOnce(passwordlessData);
// usersService.updateUserLastLoggedOn
mockUser.updateUserLastLoggedOn.mockResolvedValue(E.right(true));
const result = await authService.verifyMagicLinkTokens(magicLinkVerify);
expect(result).toEqualRight({
@@ -244,7 +239,7 @@ describe('verifyMagicLinkTokens', () => {
// generateAuthTokens
mockJWT.sign.mockReturnValue(user.refreshToken);
// UpdateUserRefreshToken
mockUser.updateUserRefreshToken.mockResolvedValueOnce(
mockUser.UpdateUserRefreshToken.mockResolvedValueOnce(
E.left(USER_NOT_FOUND),
);
@@ -269,7 +264,7 @@ describe('verifyMagicLinkTokens', () => {
// generateAuthTokens
mockJWT.sign.mockReturnValue(user.refreshToken);
// UpdateUserRefreshToken
mockUser.updateUserRefreshToken.mockResolvedValueOnce(E.right(user));
mockUser.UpdateUserRefreshToken.mockResolvedValueOnce(E.right(user));
// deletePasswordlessVerificationToken
mockPrisma.verificationToken.delete.mockRejectedValueOnce('RecordNotFound');
@@ -285,7 +280,7 @@ describe('generateAuthTokens', () => {
test('Should successfully generate tokens with valid inputs', async () => {
mockJWT.sign.mockReturnValue(user.refreshToken);
// UpdateUserRefreshToken
mockUser.updateUserRefreshToken.mockResolvedValueOnce(E.right(user));
mockUser.UpdateUserRefreshToken.mockResolvedValueOnce(E.right(user));
const result = await authService.generateAuthTokens(user.uid);
expect(result).toEqualRight({
@@ -297,7 +292,7 @@ describe('generateAuthTokens', () => {
test('Should throw USER_NOT_FOUND when updating refresh tokens fails', async () => {
mockJWT.sign.mockReturnValue(user.refreshToken);
// UpdateUserRefreshToken
mockUser.updateUserRefreshToken.mockResolvedValueOnce(
mockUser.UpdateUserRefreshToken.mockResolvedValueOnce(
E.left(USER_NOT_FOUND),
);
@@ -324,7 +319,7 @@ describe('refreshAuthTokens', () => {
// generateAuthTokens
mockJWT.sign.mockReturnValue(user.refreshToken);
// UpdateUserRefreshToken
mockUser.updateUserRefreshToken.mockResolvedValueOnce(
mockUser.UpdateUserRefreshToken.mockResolvedValueOnce(
E.left(USER_NOT_FOUND),
);
@@ -353,7 +348,7 @@ describe('refreshAuthTokens', () => {
// generateAuthTokens
mockJWT.sign.mockReturnValue('sdhjcbjsdhcbshjdcb');
// UpdateUserRefreshToken
mockUser.updateUserRefreshToken.mockResolvedValueOnce(
mockUser.UpdateUserRefreshToken.mockResolvedValueOnce(
E.right({
...user,
refreshToken: 'sdhjcbjsdhcbshjdcb',

View File

@@ -112,7 +112,7 @@ export class AuthService {
const refreshTokenHash = await argon2.hash(refreshToken);
const updatedUser = await this.usersService.updateUserRefreshToken(
const updatedUser = await this.usersService.UpdateUserRefreshToken(
refreshTokenHash,
userUid,
);
@@ -320,8 +320,6 @@ export class AuthService {
statusCode: HttpStatus.NOT_FOUND,
});
this.usersService.updateUserLastLoggedOn(passwordlessTokens.value.userUid);
return E.right(tokens.right);
}

View File

@@ -761,39 +761,3 @@ export const POSTHOG_CLIENT_NOT_INITIALIZED = 'posthog/client_not_initialized';
* Inputs supplied are invalid
*/
export const INVALID_PARAMS = 'invalid_parameters' as const;
/**
* The provided label for the access-token is short (less than 3 characters)
* (AccessTokenService)
*/
export const ACCESS_TOKEN_LABEL_SHORT = 'access_token/label_too_short';
/**
* The provided expiryInDays value is not valid
* (AccessTokenService)
*/
export const ACCESS_TOKEN_EXPIRY_INVALID = 'access_token/expiry_days_invalid';
/**
* The provided PAT ID is invalid
* (AccessTokenService)
*/
export const ACCESS_TOKEN_NOT_FOUND = 'access_token/access_token_not_found';
/**
* AccessTokens is expired
* (AccessTokenService)
*/
export const ACCESS_TOKENS_EXPIRED = 'TOKEN_EXPIRED';
/**
* AccessTokens is invalid
* (AccessTokenService)
*/
export const ACCESS_TOKENS_INVALID = 'TOKEN_INVALID';
/**
* AccessTokens is invalid
* (AccessTokenService)
*/
export const ACCESS_TOKENS_INVALID_DATA_ID = 'INVALID_ID';

View File

@@ -1,48 +0,0 @@
import {
BadRequestException,
CanActivate,
ExecutionContext,
Injectable,
} from '@nestjs/common';
import { Request } from 'express';
import { AccessTokenService } from 'src/access-token/access-token.service';
import * as E from 'fp-ts/Either';
import { DateTime } from 'luxon';
import { ACCESS_TOKENS_EXPIRED, ACCESS_TOKENS_INVALID } from 'src/errors';
import { createCLIErrorResponse } from 'src/access-token/helper';
@Injectable()
export class PATAuthGuard implements CanActivate {
constructor(private accessTokenService: AccessTokenService) {}
async canActivate(context: ExecutionContext): Promise<boolean> {
const request = context.switchToHttp().getRequest();
const token = this.extractTokenFromHeader(request);
if (!token) {
throw new BadRequestException(
createCLIErrorResponse(ACCESS_TOKENS_INVALID),
);
}
const userAccessToken = await this.accessTokenService.getUserPAT(token);
if (E.isLeft(userAccessToken))
throw new BadRequestException(
createCLIErrorResponse(ACCESS_TOKENS_INVALID),
);
request.user = userAccessToken.right.user;
const accessToken = userAccessToken.right;
if (accessToken.expiresOn === null) return true;
const today = DateTime.now().toISO();
if (accessToken.expiresOn.toISOString() > today) return true;
throw new BadRequestException(
createCLIErrorResponse(ACCESS_TOKENS_EXPIRED),
);
}
private extractTokenFromHeader(request: Request): string | undefined {
const [type, token] = request.headers.authorization?.split(' ') ?? [];
return type === 'Bearer' ? token : undefined;
}
}

View File

@@ -1,34 +0,0 @@
import {
CallHandler,
ExecutionContext,
Injectable,
NestInterceptor,
UnauthorizedException,
} from '@nestjs/common';
import { Observable, map } from 'rxjs';
import { AccessTokenService } from 'src/access-token/access-token.service';
import * as E from 'fp-ts/Either';
@Injectable()
export class AccessTokenInterceptor implements NestInterceptor {
constructor(private readonly accessTokenService: AccessTokenService) {}
intercept(context: ExecutionContext, handler: CallHandler): Observable<any> {
const req = context.switchToHttp().getRequest();
const authHeader = req.headers.authorization;
const token = authHeader && authHeader.split(' ')[1];
if (!token) {
throw new UnauthorizedException();
}
return handler.handle().pipe(
map(async (data) => {
const userAccessToken =
await this.accessTokenService.updateLastUsedForPAT(token);
if (E.isLeft(userAccessToken)) throw new UnauthorizedException();
return data;
}),
);
}
}

View File

@@ -1,26 +0,0 @@
import {
Injectable,
NestInterceptor,
ExecutionContext,
CallHandler,
} from '@nestjs/common';
import { Observable } from 'rxjs';
import { tap } from 'rxjs/operators';
import { AuthUser } from 'src/types/AuthUser';
import { UserService } from 'src/user/user.service';
@Injectable()
export class UserLastLoginInterceptor implements NestInterceptor {
constructor(private userService: UserService) {}
intercept(context: ExecutionContext, next: CallHandler): Observable<any> {
const user: AuthUser = context.switchToHttp().getRequest().user;
const now = Date.now();
return next.handle().pipe(
tap(() => {
this.userService.updateUserLastLoggedOn(user.uid);
}),
);
}
}

View File

@@ -48,7 +48,6 @@ const user: AuthUser = {
photoURL: 'https://en.wikipedia.org/wiki/Dwight_Schrute',
isAdmin: false,
refreshToken: 'hbfvdkhjbvkdvdfjvbnkhjb',
lastLoggedOn: createdOn,
createdOn: createdOn,
currentGQLSession: {},
currentRESTSession: {},

View File

@@ -1,5 +1,3 @@
import { TeamRequest } from '@prisma/client';
// Type of data returned from the query to obtain all search results
export type SearchQueryReturnType = {
id: string;
@@ -14,12 +12,3 @@ export type ParentTreeQueryReturnType = {
parentID: string;
title: string;
};
// Type of data returned from the query to fetch collection details from CLI
export type GetCollectionResponse = {
id: string;
data: string | null;
title: string;
parentID: string | null;
folders: GetCollectionResponse[];
requests: TeamRequest[];
};

View File

@@ -12,7 +12,6 @@ import {
TEAM_COL_REORDERING_FAILED,
TEAM_COL_SAME_NEXT_COLL,
TEAM_INVALID_COLL_ID,
TEAM_MEMBER_NOT_FOUND,
TEAM_NOT_OWNER,
} from 'src/errors';
import { PrismaService } from 'src/prisma/prisma.service';
@@ -20,18 +19,15 @@ import { PubSubService } from 'src/pubsub/pubsub.service';
import { AuthUser } from 'src/types/AuthUser';
import { TeamCollectionService } from './team-collection.service';
import { TeamCollection } from './team-collection.model';
import { TeamService } from 'src/team/team.service';
const mockPrisma = mockDeep<PrismaService>();
const mockPubSub = mockDeep<PubSubService>();
const mockTeamService = mockDeep<TeamService>();
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const teamCollectionService = new TeamCollectionService(
mockPrisma,
mockPubSub as any,
mockTeamService,
);
const currentTime = new Date();
@@ -43,7 +39,6 @@ const user: AuthUser = {
photoURL: 'https://en.wikipedia.org/wiki/Dwight_Schrute',
isAdmin: false,
refreshToken: 'hbfvdkhjbvkdvdfjvbnkhjb',
lastLoggedOn: currentTime,
createdOn: currentTime,
currentGQLSession: {},
currentRESTSession: {},
@@ -1743,63 +1738,3 @@ describe('updateTeamCollection', () => {
});
//ToDo: write test cases for exportCollectionsToJSON
describe('getCollectionForCLI', () => {
test('should throw TEAM_COLL_NOT_FOUND if collectionID is invalid', async () => {
mockPrisma.teamCollection.findUniqueOrThrow.mockRejectedValueOnce(
'NotFoundError',
);
const result = await teamCollectionService.getCollectionForCLI(
'invalidID',
user.uid,
);
expect(result).toEqualLeft(TEAM_COLL_NOT_FOUND);
});
test('should throw TEAM_MEMBER_NOT_FOUND if user not in same team', async () => {
mockPrisma.teamCollection.findUniqueOrThrow.mockResolvedValueOnce(
rootTeamCollection,
);
mockTeamService.getTeamMember.mockResolvedValue(null);
const result = await teamCollectionService.getCollectionForCLI(
rootTeamCollection.id,
user.uid,
);
expect(result).toEqualLeft(TEAM_MEMBER_NOT_FOUND);
});
// test('should return the TeamCollection data for CLI', async () => {
// mockPrisma.teamCollection.findUniqueOrThrow.mockResolvedValueOnce(
// rootTeamCollection,
// );
// mockTeamService.getTeamMember.mockResolvedValue({
// membershipID: 'sdc3sfdv',
// userUid: user.uid,
// role: TeamMemberRole.OWNER,
// });
// const result = await teamCollectionService.getCollectionForCLI(
// rootTeamCollection.id,
// user.uid,
// );
// expect(result).toEqualRight({
// id: rootTeamCollection.id,
// data: JSON.stringify(rootTeamCollection.data),
// title: rootTeamCollection.title,
// parentID: rootTeamCollection.parentID,
// folders: [
// {
// id: childTeamCollection.id,
// data: JSON.stringify(childTeamCollection.data),
// title: childTeamCollection.title,
// parentID: childTeamCollection.parentID,
// folders: [],
// requests: [],
// },
// ],
// requests: [],
// });
// });
});

View File

@@ -18,34 +18,23 @@ import {
TEAM_COL_SEARCH_FAILED,
TEAM_REQ_PARENT_TREE_GEN_FAILED,
TEAM_COLL_PARENT_TREE_GEN_FAILED,
TEAM_MEMBER_NOT_FOUND,
} from '../errors';
import { PubSubService } from '../pubsub/pubsub.service';
import { escapeSqlLikeString, isValidLength } from 'src/utils';
import * as E from 'fp-ts/Either';
import * as O from 'fp-ts/Option';
import {
Prisma,
TeamCollection as DBTeamCollection,
TeamRequest,
} from '@prisma/client';
import { Prisma, TeamCollection as DBTeamCollection } from '@prisma/client';
import { CollectionFolder } from 'src/types/CollectionFolder';
import { stringToJson } from 'src/utils';
import { CollectionSearchNode } from 'src/types/CollectionSearchNode';
import {
GetCollectionResponse,
ParentTreeQueryReturnType,
SearchQueryReturnType,
} from './helper';
import { ParentTreeQueryReturnType, SearchQueryReturnType } from './helper';
import { RESTError } from 'src/types/RESTError';
import { TeamService } from 'src/team/team.service';
@Injectable()
export class TeamCollectionService {
constructor(
private readonly prisma: PrismaService,
private readonly pubsub: PubSubService,
private readonly teamService: TeamService,
) {}
TITLE_LENGTH = 3;
@@ -1355,95 +1344,4 @@ export class TeamCollectionService {
return E.left(TEAM_REQ_PARENT_TREE_GEN_FAILED);
}
}
/**
* Get all requests in a collection
*
* @param collectionID The Collection ID
* @returns A list of all requests in the collection
*/
private async getAllRequestsInCollection(collectionID: string) {
const dbTeamRequests = await this.prisma.teamRequest.findMany({
where: {
collectionID: collectionID,
},
orderBy: {
orderIndex: 'asc',
},
});
const teamRequests = dbTeamRequests.map((tr) => {
return <TeamRequest>{
id: tr.id,
collectionID: tr.collectionID,
teamID: tr.teamID,
title: tr.title,
request: JSON.stringify(tr.request),
};
});
return teamRequests;
}
/**
* Get Collection Tree for CLI
*
* @param parentID The parent Collection ID
* @returns Collection tree for CLI
*/
private async getCollectionTreeForCLI(parentID: string | null) {
const childCollections = await this.prisma.teamCollection.findMany({
where: { parentID },
orderBy: { orderIndex: 'asc' },
});
const response: GetCollectionResponse[] = [];
for (const collection of childCollections) {
const folder: GetCollectionResponse = {
id: collection.id,
data: collection.data === null ? null : JSON.stringify(collection.data),
title: collection.title,
parentID: collection.parentID,
folders: await this.getCollectionTreeForCLI(collection.id),
requests: await this.getAllRequestsInCollection(collection.id),
};
response.push(folder);
}
return response;
}
/**
* Get Collection for CLI
*
* @param collectionID The Collection ID
* @param userUid The User UID
* @returns An Either of the Collection details
*/
async getCollectionForCLI(collectionID: string, userUid: string) {
try {
const collection = await this.prisma.teamCollection.findUniqueOrThrow({
where: { id: collectionID },
});
const teamMember = await this.teamService.getTeamMember(
collection.teamID,
userUid,
);
if (!teamMember) return E.left(TEAM_MEMBER_NOT_FOUND);
return E.right(<GetCollectionResponse>{
id: collection.id,
data: collection.data === null ? null : JSON.stringify(collection.data),
title: collection.title,
parentID: collection.parentID,
folders: await this.getCollectionTreeForCLI(collection.id),
requests: await this.getAllRequestsInCollection(collection.id),
});
} catch (error) {
return E.left(TEAM_COLL_NOT_FOUND);
}
}
}

View File

@@ -6,24 +6,19 @@ import {
JSON_INVALID,
TEAM_ENVIRONMENT_NOT_FOUND,
TEAM_ENVIRONMENT_SHORT_NAME,
TEAM_MEMBER_NOT_FOUND,
} from 'src/errors';
import { TeamService } from 'src/team/team.service';
import { TeamMemberRole } from 'src/team/team.model';
const mockPrisma = mockDeep<PrismaService>();
const mockPubSub = {
publish: jest.fn().mockResolvedValue(null),
};
const mockTeamService = mockDeep<TeamService>();
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const teamEnvironmentsService = new TeamEnvironmentsService(
mockPrisma,
mockPubSub as any,
mockTeamService,
);
const teamEnvironment = {
@@ -385,47 +380,4 @@ describe('TeamEnvironmentsService', () => {
expect(result).toEqual(0);
});
});
describe('getTeamEnvironmentForCLI', () => {
test('should successfully return a TeamEnvironment with valid ID', async () => {
mockPrisma.teamEnvironment.findFirstOrThrow.mockResolvedValueOnce(
teamEnvironment,
);
mockTeamService.getTeamMember.mockResolvedValue({
membershipID: 'sdc3sfdv',
userUid: '123454',
role: TeamMemberRole.OWNER,
});
const result = await teamEnvironmentsService.getTeamEnvironmentForCLI(
teamEnvironment.id,
'123454',
);
expect(result).toEqualRight(teamEnvironment);
});
test('should throw TEAM_ENVIRONMENT_NOT_FOUND with invalid ID', async () => {
mockPrisma.teamEnvironment.findFirstOrThrow.mockRejectedValueOnce(
'RejectOnNotFound',
);
const result = await teamEnvironmentsService.getTeamEnvironment(
teamEnvironment.id,
);
expect(result).toEqualLeft(TEAM_ENVIRONMENT_NOT_FOUND);
});
test('should throw TEAM_MEMBER_NOT_FOUND if user not in same team', async () => {
mockPrisma.teamEnvironment.findFirstOrThrow.mockResolvedValueOnce(
teamEnvironment,
);
mockTeamService.getTeamMember.mockResolvedValue(null);
const result = await teamEnvironmentsService.getTeamEnvironmentForCLI(
teamEnvironment.id,
'333',
);
expect(result).toEqualLeft(TEAM_MEMBER_NOT_FOUND);
});
});
});

View File

@@ -6,17 +6,14 @@ import { TeamEnvironment } from './team-environments.model';
import {
TEAM_ENVIRONMENT_NOT_FOUND,
TEAM_ENVIRONMENT_SHORT_NAME,
TEAM_MEMBER_NOT_FOUND,
} from 'src/errors';
import * as E from 'fp-ts/Either';
import { isValidLength } from 'src/utils';
import { TeamService } from 'src/team/team.service';
@Injectable()
export class TeamEnvironmentsService {
constructor(
private readonly prisma: PrismaService,
private readonly pubsub: PubSubService,
private readonly teamService: TeamService,
) {}
TITLE_LENGTH = 3;
@@ -245,30 +242,4 @@ export class TeamEnvironmentsService {
});
return envCount;
}
/**
* Get details of a TeamEnvironment for CLI.
*
* @param id TeamEnvironment ID
* @param userUid User UID
* @returns Either of a TeamEnvironment or error message
*/
async getTeamEnvironmentForCLI(id: string, userUid: string) {
try {
const teamEnvironment =
await this.prisma.teamEnvironment.findFirstOrThrow({
where: { id },
});
const teamMember = await this.teamService.getTeamMember(
teamEnvironment.teamID,
userUid,
);
if (!teamMember) return E.left(TEAM_MEMBER_NOT_FOUND);
return E.right(teamEnvironment);
} catch (error) {
return E.left(TEAM_ENVIRONMENT_NOT_FOUND);
}
}
}

View File

@@ -1,7 +0,0 @@
export type AccessToken = {
id: string;
label: string;
createdOn: Date;
lastUsedOn: Date;
expiresOn: null | Date;
};

View File

@@ -5,6 +5,6 @@ import { HttpStatus } from '@nestjs/common';
** Since its REST we need to return the HTTP status code along with the error message
*/
export type RESTError = {
message: string | Record<string, string>;
message: string;
statusCode: HttpStatus;
};

View File

@@ -38,7 +38,6 @@ const user: AuthUser = {
photoURL: 'https://en.wikipedia.org/wiki/Dwight_Schrute',
isAdmin: false,
refreshToken: 'hbfvdkhjbvkdvdfjvbnkhjb',
lastLoggedOn: currentTime,
createdOn: currentTime,
currentGQLSession: {},
currentRESTSession: {},

View File

@@ -41,7 +41,6 @@ const user: AuthUser = {
photoURL: 'https://example.com/photo.png',
isAdmin: false,
refreshToken: null,
lastLoggedOn: new Date(),
createdOn: new Date(),
currentGQLSession: null,
currentRESTSession: null,

View File

@@ -27,7 +27,6 @@ const user: AuthUser = {
refreshToken: 'hbfvdkhjbvkdvdfjvbnkhjb',
currentGQLSession: {},
currentRESTSession: {},
lastLoggedOn: currentTime,
createdOn: currentTime,
};

View File

@@ -30,12 +30,6 @@ export class User {
})
isAdmin: boolean;
@Field({
nullable: true,
description: 'Date when the user last logged in',
})
lastLoggedOn: Date;
@Field({
description: 'Date when the user account was created',
})

View File

@@ -42,7 +42,6 @@ const user: AuthUser = {
currentRESTSession: {},
currentGQLSession: {},
refreshToken: 'hbfvdkhjbvkdvdfjvbnkhjb',
lastLoggedOn: currentTime,
createdOn: currentTime,
};
@@ -55,7 +54,6 @@ const adminUser: AuthUser = {
currentRESTSession: {},
currentGQLSession: {},
refreshToken: 'hbfvdkhjbvkdvdfjvbnkhjb',
lastLoggedOn: currentTime,
createdOn: currentTime,
};
@@ -69,7 +67,6 @@ const users: AuthUser[] = [
currentRESTSession: {},
currentGQLSession: {},
refreshToken: 'hbfvdkhjbvkdvdfjvbnkhjb',
lastLoggedOn: currentTime,
createdOn: currentTime,
},
{
@@ -81,7 +78,6 @@ const users: AuthUser[] = [
currentRESTSession: {},
currentGQLSession: {},
refreshToken: 'hbfvdkhjbvkdvdfjvbnkhjb',
lastLoggedOn: currentTime,
createdOn: currentTime,
},
{
@@ -93,7 +89,6 @@ const users: AuthUser[] = [
currentRESTSession: {},
currentGQLSession: {},
refreshToken: 'hbfvdkhjbvkdvdfjvbnkhjb',
lastLoggedOn: currentTime,
createdOn: currentTime,
},
];
@@ -108,7 +103,6 @@ const adminUsers: AuthUser[] = [
currentRESTSession: {},
currentGQLSession: {},
refreshToken: 'hbfvdkhjbvkdvdfjvbnkhjb',
lastLoggedOn: currentTime,
createdOn: currentTime,
},
{
@@ -120,7 +114,6 @@ const adminUsers: AuthUser[] = [
currentRESTSession: {},
currentGQLSession: {},
refreshToken: 'hbfvdkhjbvkdvdfjvbnkhjb',
lastLoggedOn: currentTime,
createdOn: currentTime,
},
{
@@ -132,7 +125,6 @@ const adminUsers: AuthUser[] = [
currentRESTSession: {},
currentGQLSession: {},
refreshToken: 'hbfvdkhjbvkdvdfjvbnkhjb',
lastLoggedOn: currentTime,
createdOn: currentTime,
},
];
@@ -503,26 +495,6 @@ describe('UserService', () => {
});
});
describe('updateUserLastLoggedOn', () => {
test('should resolve right and update user last logged on', async () => {
const currentTime = new Date();
mockPrisma.user.update.mockResolvedValueOnce({
...user,
lastLoggedOn: currentTime,
});
const result = await userService.updateUserLastLoggedOn(user.uid);
expect(result).toEqualRight(true);
});
test('should resolve left and error when invalid user uid is passed', async () => {
mockPrisma.user.update.mockRejectedValueOnce('NotFoundError');
const result = await userService.updateUserLastLoggedOn('invalidUserUid');
expect(result).toEqualLeft(USER_NOT_FOUND);
});
});
describe('fetchAllUsers', () => {
test('should resolve right and return 20 users when cursor is null', async () => {
mockPrisma.user.findMany.mockResolvedValueOnce(users);

View File

@@ -114,7 +114,7 @@ export class UserService {
* @param userUid User uid
* @returns Either of User with updated refreshToken
*/
async updateUserRefreshToken(refreshTokenHash: string, userUid: string) {
async UpdateUserRefreshToken(refreshTokenHash: string, userUid: string) {
try {
const user = await this.prisma.user.update({
where: {
@@ -174,7 +174,6 @@ export class UserService {
displayName: userDisplayName,
email: profile.emails[0].value,
photoURL: userPhotoURL,
lastLoggedOn: new Date(),
providerAccounts: {
create: {
provider: profile.provider,
@@ -222,7 +221,7 @@ export class UserService {
}
/**
* Update User displayName and photoURL when logged in via a SSO provider
* Update User displayName and photoURL
*
* @param user User object
* @param profile Data received from SSO provider on the users account
@@ -237,7 +236,6 @@ export class UserService {
data: {
displayName: !profile.displayName ? null : profile.displayName,
photoURL: !profile.photos ? null : profile.photos[0].value,
lastLoggedOn: new Date(),
},
});
return E.right(updatedUser);
@@ -291,7 +289,7 @@ export class UserService {
}
/**
* Update a user's displayName
* Update a user's data
* @param userUID User UID
* @param displayName User's displayName
* @returns a Either of User or error
@@ -318,22 +316,6 @@ export class UserService {
}
}
/**
* Update user's lastLoggedOn timestamp
* @param userUID User UID
*/
async updateUserLastLoggedOn(userUid: string) {
try {
await this.prisma.user.update({
where: { uid: userUid },
data: { lastLoggedOn: new Date() },
});
return E.right(true);
} catch (e) {
return E.left(USER_NOT_FOUND);
}
}
/**
* Validate and parse currentRESTSession and currentGQLSession
* @param sessionData string of the session

View File

@@ -3,7 +3,7 @@ import { ExecException } from "child_process";
import { HoppErrorCode } from "../../types/errors";
import { runCLI, getErrorCode, getTestJsonFilePath } from "../utils";
describe("Test `hopp test <file_path_or_id>` command:", () => {
describe("Test `hopp test <file>` command:", () => {
describe("Argument parsing", () => {
test("Errors with the code `INVALID_ARGUMENT` for not supplying enough arguments", async () => {
const args = "test";
@@ -131,7 +131,7 @@ describe("Test `hopp test <file_path_or_id>` command:", () => {
});
});
describe("Test `hopp test <file_path_or_id> --env <file_path_or_id>` command:", () => {
describe("Test `hopp test <file> --env <file>` command:", () => {
describe("Supplied environment export file validations", () => {
const VALID_TEST_ARGS = `test ${getTestJsonFilePath("passes-coll.json", "collection")}`;
@@ -310,7 +310,7 @@ describe("Test `hopp test <file_path_or_id> --env <file_path_or_id>` command:",
});
});
describe("Test `hopp test <file_path_or_id> --delay <delay_in_ms>` command:", () => {
describe("Test `hopp test <file> --delay <delay_in_ms>` command:", () => {
const VALID_TEST_ARGS = `test ${getTestJsonFilePath("passes-coll.json", "collection")}`;
test("Errors with the code `INVALID_ARGUMENT` on not supplying a delay value", async () => {
@@ -343,116 +343,3 @@ describe("Test `hopp test <file_path_or_id> --delay <delay_in_ms>` command:", ()
expect(error).toBeNull();
});
});
describe.skip("Test `hopp test <file_path_or_id> --env <file_path_or_id> --token <access_token> --server <server_url>` command:", () => {
const {
REQ_BODY_ENV_VARS_COLL_ID,
COLLECTION_LEVEL_HEADERS_AUTH_COLL_ID,
REQ_BODY_ENV_VARS_ENVS_ID,
PERSONAL_ACCESS_TOKEN,
} = process.env;
if (
!REQ_BODY_ENV_VARS_COLL_ID ||
!COLLECTION_LEVEL_HEADERS_AUTH_COLL_ID ||
!REQ_BODY_ENV_VARS_ENVS_ID ||
!PERSONAL_ACCESS_TOKEN
) {
return;
}
const SERVER_URL = "https://stage-shc.hoppscotch.io/backend";
describe("Validations", () => {
test("Errors with the code `INVALID_ARGUMENT` on not supplying a value for the `--token` flag", async () => {
const args = `test ${REQ_BODY_ENV_VARS_COLL_ID} --token`;
const { stderr } = await runCLI(args);
const out = getErrorCode(stderr);
expect(out).toBe<HoppErrorCode>("INVALID_ARGUMENT");
});
test("Errors with the code `INVALID_ARGUMENT` on not supplying a value for the `--server` flag", async () => {
const args = `test ${REQ_BODY_ENV_VARS_COLL_ID} --server`;
const { stderr } = await runCLI(args);
const out = getErrorCode(stderr);
expect(out).toBe<HoppErrorCode>("INVALID_ARGUMENT");
});
test("Errors with the code `TOKEN_INVALID` if the supplied access token is invalid", async () => {
const args = `test ${REQ_BODY_ENV_VARS_COLL_ID} --token invalid-token --server ${SERVER_URL}`;
const { stderr } = await runCLI(args);
const out = getErrorCode(stderr);
expect(out).toBe<HoppErrorCode>("TOKEN_INVALID");
});
test("Errors with the code `TOKEN_INVALID` if the supplied collection ID is invalid", async () => {
const args = `test invalid-id --token ${PERSONAL_ACCESS_TOKEN} --server ${SERVER_URL}`;
const { stderr } = await runCLI(args);
const out = getErrorCode(stderr);
expect(out).toBe<HoppErrorCode>("INVALID_ID");
});
test("Errors with the code `TOKEN_INVALID` if the supplied environment ID is invalid", async () => {
const args = `test ${REQ_BODY_ENV_VARS_COLL_ID} --env invalid-id --token ${PERSONAL_ACCESS_TOKEN} --server ${SERVER_URL}`;
const { stderr } = await runCLI(args);
const out = getErrorCode(stderr);
expect(out).toBe<HoppErrorCode>("INVALID_ID");
});
});
test("Successfully retrieves a collection with the ID", async () => {
const args = `test ${COLLECTION_LEVEL_HEADERS_AUTH_COLL_ID} --token ${PERSONAL_ACCESS_TOKEN} --server ${SERVER_URL}`;
const { error } = await runCLI(args);
expect(error).toBeNull();
});
test("Successfully retrieves collections and environments from a workspace using their respective IDs", async () => {
const args = `test ${REQ_BODY_ENV_VARS_COLL_ID} --env ${REQ_BODY_ENV_VARS_ENVS_ID} --token ${PERSONAL_ACCESS_TOKEN} --server ${SERVER_URL}`;
const { error } = await runCLI(args);
expect(error).toBeNull();
});
test("Supports specifying collection file path along with environment ID", async () => {
const TESTS_PATH = getTestJsonFilePath(
"req-body-env-vars-coll.json",
"collection"
);
const args = `test ${TESTS_PATH} --env ${REQ_BODY_ENV_VARS_ENVS_ID} --token ${PERSONAL_ACCESS_TOKEN} --server ${SERVER_URL}`;
const { error } = await runCLI(args);
expect(error).toBeNull();
});
test("Supports specifying environment file path along with collection ID", async () => {
const ENV_PATH = getTestJsonFilePath(
"req-body-env-vars-envs.json",
"environment"
);
const args = `test ${REQ_BODY_ENV_VARS_COLL_ID} --env ${ENV_PATH} --token ${PERSONAL_ACCESS_TOKEN} --server ${SERVER_URL}`;
const { error } = await runCLI(args);
expect(error).toBeNull();
});
test("Supports specifying both collection and environment file paths", async () => {
const TESTS_PATH = getTestJsonFilePath(
"req-body-env-vars-coll.json",
"collection"
);
const ENV_PATH = getTestJsonFilePath(
"req-body-env-vars-envs.json",
"environment"
);
const args = `test ${TESTS_PATH} --env ${ENV_PATH} --token ${PERSONAL_ACCESS_TOKEN}`;
const { error } = await runCLI(args);
expect(error).toBeNull();
});
});

View File

@@ -1,38 +1,30 @@
import { handleError } from "../handlers/error";
import { parseDelayOption } from "../options/test/delay";
import { parseEnvsData } from "../options/test/env";
import { TestCmdOptions } from "../types/commands";
import { HoppEnvs } from "../types/request";
import { isHoppCLIError } from "../utils/checks";
import {
collectionsRunner,
collectionsRunnerExit,
collectionsRunnerResult,
} from "../utils/collections";
import { handleError } from "../handlers/error";
import { parseCollectionData } from "../utils/mutators";
import { parseEnvsData } from "../options/test/env";
import { TestCmdOptions } from "../types/commands";
import { parseDelayOption } from "../options/test/delay";
import { HoppEnvs } from "../types/request";
import { isHoppCLIError } from "../utils/checks";
export const test = (pathOrId: string, options: TestCmdOptions) => async () => {
export const test = (path: string, options: TestCmdOptions) => async () => {
try {
const delay = options.delay ? parseDelayOption(options.delay) : 0;
const delay = options.delay ? parseDelayOption(options.delay) : 0
const envs = options.env ? await parseEnvsData(options.env) : <HoppEnvs>{ global: [], selected: [] }
const collections = await parseCollectionData(path)
const envs = options.env
? await parseEnvsData(options.env, options.token, options.server)
: <HoppEnvs>{ global: [], selected: [] };
const collections = await parseCollectionData(
pathOrId,
options.token,
options.server
);
const report = await collectionsRunner({ collections, envs, delay });
const hasSucceeded = collectionsRunnerResult(report);
collectionsRunnerExit(hasSucceeded);
} catch (e) {
if (isHoppCLIError(e)) {
handleError(e);
const report = await collectionsRunner({collections, envs, delay})
const hasSucceeded = collectionsRunnerResult(report)
collectionsRunnerExit(hasSucceeded)
} catch(e) {
if(isHoppCLIError(e)) {
handleError(e)
process.exit(1);
} else throw e;
}
else throw e
}
};

View File

@@ -82,15 +82,6 @@ export const handleError = <T extends HoppErrorCode>(error: HoppError<T>) => {
case "TESTS_FAILING":
ERROR_MSG = error.data;
break;
case "TOKEN_EXPIRED":
ERROR_MSG = `Token is expired: ${error.data}`;
break;
case "TOKEN_INVALID":
ERROR_MSG = `Token is invalid/removed: ${error.data}`;
break;
case "INVALID_ID":
ERROR_MSG = `The collection/environment is not valid/not accessible to the user: ${error.data}`;
break;
}
if (!S.isEmpty(ERROR_MSG)) {

View File

@@ -49,22 +49,14 @@ program.exitOverride().configureOutput({
program
.command("test")
.argument(
"<file_path_or_id>",
"path to a hoppscotch collection.json file or collection ID from a workspace for CI testing"
)
.option(
"-e, --env <file_path_or_id>",
"path to an environment variables json file or environment ID from a workspace"
"<file_path>",
"path to a hoppscotch collection.json file for CI testing"
)
.option("-e, --env <file_path>", "path to an environment variables json file")
.option(
"-d, --delay <delay_in_ms>",
"delay in milliseconds(ms) between consecutive requests within a collection"
)
.option(
"--token <access_token>",
"personal access token to access collections/environments from a workspace"
)
.option("--server <server_url>", "server URL for SH instance")
.allowExcessArguments(false)
.allowUnknownOption(false)
.description("running hoppscotch collection.json file")
@@ -74,7 +66,7 @@ program
"https://docs.hoppscotch.io/documentation/clients/cli#commands"
)}`
)
.action(async (pathOrId, options) => await test(pathOrId, options)());
.action(async (path, options) => await test(path, options)());
export const cli = async (args: string[]) => {
try {
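
For reference, the two argument styles visible in this hunk correspond to invocations like the sketch below; the IDs, token, and server URL are placeholders in the style of the skipped e2e suite:

// File-path style, accepted on both sides of this hunk:
const fileArgs = "test ./collection.json --env ./env.json --delay 100";
// Workspace-ID style, which relies on the --token/--server options:
const COLL_ID = "<collection-id>";
const ENV_ID = "<environment-id>";
const PAT = "pat-<uuid>";
const SERVER_URL = "https://stage-shc.hoppscotch.io/backend";
const idArgs = `test ${COLL_ID} --env ${ENV_ID} --token ${PAT} --server ${SERVER_URL}`;
console.log(fileArgs, idArgs);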

View File

@@ -1,6 +1,4 @@
import { Environment, EnvironmentSchemaVersion } from "@hoppscotch/data";
import axios, { AxiosError } from "axios";
import fs from "fs/promises";
import { Environment } from "@hoppscotch/data";
import { entityReference } from "verzod";
import { z } from "zod";
@@ -12,94 +10,13 @@ import {
} from "../../types/request";
import { readJsonFile } from "../../utils/mutators";
interface WorkspaceEnvironment {
id: string;
teamID: string;
name: string;
variables: HoppEnvPair[];
}
// Helper functions to transform workspace environment data to the `HoppEnvironment` format
const transformWorkspaceEnvironment = (
workspaceEnvironment: WorkspaceEnvironment
): Environment => {
const { teamID, variables, ...rest } = workspaceEnvironment;
// Add `secret` field if the data conforms to an older schema
const transformedEnvVars = variables.map((variable) => {
if (!("secret" in variable)) {
return {
...(variable as HoppEnvPair),
secret: false,
} as HoppEnvPair;
}
return variable;
});
return {
v: EnvironmentSchemaVersion,
variables: transformedEnvVars,
...rest,
};
};
/**
* Parses env json file for given path and validates the parsed env json object
* @param pathOrId Path of env.json file to be parsed
* @param [accessToken] Personal access token to fetch workspace environments
* @param [serverUrl] server URL for SH instance
* @param path Path of env.json file to be parsed
* @returns For successful parsing we get HoppEnvs object
*/
export async function parseEnvsData(
pathOrId: string,
accessToken?: string,
serverUrl?: string
) {
let contents = null;
let fileExistsInPath = null;
try {
await fs.access(pathOrId);
fileExistsInPath = true;
} catch (e) {
fileExistsInPath = false;
}
if (accessToken && !fileExistsInPath) {
try {
const hostname = serverUrl || "https://api.hoppscotch.io";
const url = `${hostname.endsWith("/") ? hostname.slice(0, -1) : hostname}/v1/access-tokens/environment/${pathOrId}`;
const { data }: { data: WorkspaceEnvironment } = await axios.get(url, {
headers: {
Authorization: `Bearer ${accessToken}`,
},
});
contents = transformWorkspaceEnvironment(data);
} catch (err) {
const errReason = (
err as AxiosError<{
reason?: any;
message: string;
error: string;
statusCode: number;
}>
).response?.data?.reason;
if (errReason) {
throw error({
code: errReason,
data: pathOrId,
});
}
}
}
if (contents === null) {
contents = await readJsonFile(pathOrId);
}
export async function parseEnvsData(path: string) {
const contents = await readJsonFile(path);
const envPairs: Array<HoppEnvPair | Record<string, string>> = [];
// The legacy key-value pair format that is still supported
@@ -116,7 +33,7 @@ export async function parseEnvsData(
// CLI doesn't support bulk environments export
// Hence we check for this case and throw an error if it matches the format
if (HoppBulkEnvExportObjectResult.success) {
throw error({ code: "BULK_ENV_FILE", path: pathOrId, data: error });
throw error({ code: "BULK_ENV_FILE", path, data: error });
}
// Checks if the environment file is of the correct format
@@ -125,7 +42,7 @@ export async function parseEnvsData(
!HoppEnvKeyPairResult.success &&
HoppEnvExportObjectResult.type === "err"
) {
throw error({ code: "MALFORMED_ENV_FILE", path: pathOrId, data: error });
throw error({ code: "MALFORMED_ENV_FILE", path, data: error });
}
if (HoppEnvKeyPairResult.success) {
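For reference, a hedged sketch of the two single-environment file shapes the checks above accept (values are made up; a bulk export, i.e. an array of environments, is rejected with `BULK_ENV_FILE`):

```ts
// 1. Legacy key-value pair format
const legacyEnvFile: Record<string, string> = {
  BASE_URL: "https://api.example.com",
  API_KEY: "dummy-key",
}

// 2. Single exported Environment object (schema version and `secret` flags shown for illustration)
const exportedEnvFile = {
  v: 2,
  id: "env_123",
  name: "Staging",
  variables: [{ key: "BASE_URL", value: "https://api.example.com", secret: false }],
}
```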

View File

@@ -1,8 +1,6 @@
export type TestCmdOptions = {
env: string | undefined;
delay: string | undefined;
token: string | undefined;
server: string | undefined;
};
export type HOPP_ENV_FILE_EXT = "json";
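As a quick orientation, a hypothetical invocation of the trimmed-down `test` action with this options shape (module paths and file names are assumptions):

```ts
import { test } from "../commands/test" // assumed module path
import { TestCmdOptions } from "../types/commands" // assumed module path

const options: TestCmdOptions = { env: "./envs.json", delay: "100" }

// `test` returns an async thunk, mirroring `.action(async (path, options) => await test(path, options)())`
await test("./collection.json", options)()
```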

View File

@@ -26,9 +26,6 @@ type HoppErrors = {
MALFORMED_ENV_FILE: HoppErrorPath & HoppErrorData;
BULK_ENV_FILE: HoppErrorPath & HoppErrorData;
INVALID_FILE_TYPE: HoppErrorData;
TOKEN_EXPIRED: HoppErrorData;
TOKEN_INVALID: HoppErrorData;
INVALID_ID: HoppErrorData;
};
export type HoppErrorCode = keyof HoppErrors;

View File

@@ -1,34 +1,12 @@
import {
CollectionSchemaVersion,
HoppCollection,
HoppRESTRequest,
} from "@hoppscotch/data";
import { HoppCollection, HoppRESTRequest } from "@hoppscotch/data";
import fs from "fs/promises";
import { entityReference } from "verzod";
import { z } from "zod";
import axios, { AxiosError } from "axios";
import { error } from "../types/errors";
import { FormDataEntry } from "../types/request";
import { isHoppErrnoException } from "./checks";
interface WorkspaceCollection {
id: string;
data: string | null;
title: string;
parentID: string | null;
folders: WorkspaceCollection[];
requests: WorkspaceRequest[];
}
interface WorkspaceRequest {
id: string;
collectionID: string;
teamID: string;
title: string;
request: string;
}
const getValidRequests = (
collections: HoppCollection[],
collectionFilePath: string
@@ -64,50 +42,6 @@ const getValidRequests = (
});
};
// Helper functions to transform workspace collection data to the `HoppCollection` format
const transformWorkspaceRequests = (requests: WorkspaceRequest[]) =>
requests.map(({ request }) => JSON.parse(request));
const transformChildCollections = (
childCollections: WorkspaceCollection[]
): HoppCollection[] => {
return childCollections.map(({ id, title, data, folders, requests }) => {
const parsedData = data ? JSON.parse(data) : {};
const { auth = { authType: "inherit", authActive: false }, headers = [] } =
parsedData;
return {
v: CollectionSchemaVersion,
id,
name: title,
folders: transformChildCollections(folders),
requests: transformWorkspaceRequests(requests),
auth,
headers,
};
});
};
const transformWorkspaceCollection = (
collection: WorkspaceCollection
): HoppCollection => {
const { id, title, data, requests, folders } = collection;
const parsedData = data ? JSON.parse(data) : {};
const { auth = { authType: "inherit", authActive: false }, headers = [] } =
parsedData;
return {
v: CollectionSchemaVersion,
id,
name: title,
folders: transformChildCollections(folders),
requests: transformWorkspaceRequests(requests),
auth,
headers,
};
};
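To make the mapping concrete, a hypothetical `WorkspaceCollection` payload and the approximate `HoppCollection` it becomes (IDs and titles are made up):

```ts
const workspacePayload: WorkspaceCollection = {
  id: "coll_1",
  data: JSON.stringify({ auth: { authType: "basic", authActive: true }, headers: [] }),
  title: "Users API",
  parentID: null,
  folders: [],
  requests: [
    { id: "req_1", collectionID: "coll_1", teamID: "team_1", title: "List users", request: "{}" },
  ],
}

const hoppCollection = transformWorkspaceCollection(workspacePayload)
// hoppCollection is roughly:
// {
//   v: CollectionSchemaVersion,
//   id: "coll_1",
//   name: "Users API",
//   folders: [],
//   requests: [{}],   // each `request` string is JSON.parse'd
//   auth: { authType: "basic", authActive: true },
//   headers: [],
// }
```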
/**
* Parses array of FormDataEntry to FormData.
* @param values Array of FormDataEntry.
@@ -158,64 +92,16 @@ export async function readJsonFile(path: string): Promise<unknown> {
/**
* Parses the collection JSON file at the given path and validates
* the parsed collection array
* @param pathOrId Collection json file path
* @param [accessToken] Personal access token to fetch workspace collections
* @param [serverUrl] server URL for SH instance
* @returns For successful parsing we get array of HoppCollection
* the parsed collection array.
* @param path Collection json file path.
* @returns For successful parsing we get an array of HoppCollection.
*/
export async function parseCollectionData(
pathOrId: string,
accessToken?: string,
serverUrl?: string
path: string
): Promise<HoppCollection[]> {
let contents = null;
let fileExistsInPath = null;
let contents = await readJsonFile(path);
try {
await fs.access(pathOrId);
fileExistsInPath = true;
} catch (e) {
fileExistsInPath = false;
}
if (accessToken && !fileExistsInPath) {
try {
const hostname = serverUrl || "https://api.hoppscotch.io";
const url = `${hostname.endsWith("/") ? hostname.slice(0, -1) : hostname}/v1/access-tokens/collection/${pathOrId}`;
const { data }: { data: WorkspaceCollection } = await axios.get(url, {
headers: {
Authorization: `Bearer ${accessToken}`,
},
});
contents = transformWorkspaceCollection(data);
} catch (err) {
const errReason = (
err as AxiosError<{
reason?: any;
message: string;
error: string;
statusCode: number;
}>
).response?.data?.reason;
if (errReason) {
throw error({
code: errReason,
data: pathOrId,
});
}
}
}
// Fallback to reading from file if contents are not available
if (contents === null) {
contents = await readJsonFile(pathOrId);
}
const maybeArrayOfCollections: HoppCollection[] = Array.isArray(contents)
const maybeArrayOfCollections: unknown[] = Array.isArray(contents)
? contents
: [contents];
@@ -224,14 +110,12 @@ export async function parseCollectionData(
.safeParse(maybeArrayOfCollections);
if (!collectionSchemaParsedResult.success) {
console.error(`Error is `, collectionSchemaParsedResult.error);
throw error({
code: "MALFORMED_COLLECTION",
path: pathOrId,
path,
data: "Please check the collection data.",
});
}
return getValidRequests(collectionSchemaParsedResult.data, pathOrId);
return getValidRequests(collectionSchemaParsedResult.data, path);
}
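A minimal usage sketch of the simplified parser (the file name is a placeholder); note that a single collection object is wrapped into a one-element array before schema validation, and malformed input throws `MALFORMED_COLLECTION`:

```ts
// Accepts either an exported array of collections or a single collection object
const collections = await parseCollectionData("./hoppscotch-collection.json")
console.log(`Parsed ${collections.length} valid collection(s)`)
```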

View File

@@ -14,3 +14,21 @@ Since TypeScript cannot handle type information for `.vue` imports, they are shi
2. Reload the VS Code window by running `Developer: Reload Window` from the command palette.
You can learn more about Take Over mode [here](https://github.com/johnsoncodehk/volar/discussions/471).
## Building
### Prerequisites
- Install Rust: `curl https://sh.rustup.rs -sSf | sh`
- If you're on macOS, remove any existing Homebrew installation of Rust first (`brew uninstall rust && brew cleanup`)
- On Linux, install `libsoup2.4-dev`: `sudo apt install libsoup2.4-dev`
- Node v18.20 installed and currently active (select it with nvm if you use it)
### Build Instructions
1. Install the latest pnpm (`curl -fsSL https://get.pnpm.io/install.sh | sh -`) or upgrade an existing installation (`pnpm add -g pnpm`)
2. Set up the `.env` file in the root project folder; deploy the self-hosted backend first so its URLs can be filled in
3. Run `pnpm install` in the root project folder
4. Run `pnpm dev:gql-codegen` in this folder
5. Run `pnpm tauri dev` to run in debug mode (optional)
6. Run `pnpm tauri build` to build in release mode
- Use `pnpm tauri build --target universal-apple-darwin` for a universal macOS build

View File

@@ -17,6 +17,7 @@
"@fontsource-variable/material-symbols-rounded": "5.0.16",
"@fontsource-variable/roboto-mono": "5.0.16",
"@hoppscotch/common": "workspace:^",
"@hoppscotch/data": "workspace:^",
"@platform/auth": "0.1.106",
"@tauri-apps/api": "1.5.1",
"@tauri-apps/cli": "1.5.6",
@@ -36,7 +37,13 @@
"tauri-plugin-store-api": "0.0.0",
"util": "0.12.5",
"vue": "3.3.9",
"workbox-window": "6.6.0"
"workbox-window": "6.6.0",
"zod": "3.22.4",
"@urql/core": "^4.1.1",
"cookie": "^0.5.0",
"subscriptions-transport-ws": "^0.11.0",
"tauri-plugin-websocket-api": "github:tauri-apps/tauri-plugin-websocket#v1",
"wonka": "^6.3.4"
},
"devDependencies": {
"@graphql-codegen/add": "5.0.0",
@@ -76,6 +83,8 @@
"vite-plugin-pwa": "0.13.1",
"vite-plugin-static-copy": "0.12.0",
"vite-plugin-vue-layouts": "0.7.0",
"vue-tsc": "1.8.8"
"vue-tsc": "1.8.8",
"@types/cookie": "^0.5.1"
}
}

View File

@@ -25,11 +25,13 @@ tauri = { version = "1.5.3", features = [
] }
tauri-plugin-store = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
tauri-plugin-deep-link = { git = "https://github.com/FabianLars/tauri-plugin-deep-link", branch = "main" }
tauri-plugin-websocket = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
tauri-plugin-window-state = "0.1.0"
reqwest = "0.11.22"
serde_json = "1.0.108"
url = "2.5.0"
hex_color = "3.0.0"
time = "0.3.36"
[target.'cfg(target_os = "macos")'.dependencies]
cocoa = "0.25.0"

View File

@@ -21,6 +21,7 @@ fn main() {
tauri_plugin_deep_link::prepare("io.hoppscotch.desktop");
tauri::Builder::default()
.plugin(tauri_plugin_websocket::init())
.plugin(tauri_plugin_window_state::Builder::default().build())
.plugin(tauri_plugin_store::Builder::default().build())
.setup(|app| {

View File

@@ -49,7 +49,7 @@
"targets": "all"
},
"security": {
"csp": "none"
"csp": null
},
"updater": {
"active": false

View File

@@ -0,0 +1,168 @@
import { ref } from "vue"
import { makeResult, makeErrorResult, Exchange, Operation } from "@urql/core"
import { makeFetchBody, makeFetchOptions } from "@urql/core/internal"
import { filter, make, merge, mergeMap, pipe, takeUntil, map } from "wonka"
import { gqlClientError$ } from "@hoppscotch/common/helpers/backend/GQLClient"
import { Store } from "tauri-plugin-store-api"
import { Body, getClient } from "@tauri-apps/api/http"
import { parse, serialize } from "cookie"
import { SubscriptionClient } from "subscriptions-transport-ws"
import { platform } from "@hoppscotch/common/platform"
import WSWrapper from "./ws_wrapper"
const APP_DATA_PATH = "~/.hopp-desktop-app-data.dat"
export async function addCookieToFetchHeaders(
store: Store,
headers: HeadersInit = {}
) {
try {
const accessToken = await store.get<{ value: string }>("access_token")
const refreshToken = await store.get<{ value: string }>("refresh_token")
if (accessToken?.value && refreshToken?.value) {
// Assert headers as an indexable type
const headersIndexable = headers as { [key: string]: string }
const existingCookies = parse(headersIndexable["Cookie"] || "")
if (!existingCookies.access_token) {
existingCookies.access_token = accessToken.value
}
if (!existingCookies.refresh_token) {
existingCookies.refresh_token = refreshToken.value
}
// Serialize the cookies back into the headers
const serializedCookies = Object.entries(existingCookies)
.map(([name, value]) => serialize(name, value))
.join("; ")
headersIndexable["Cookie"] = serializedCookies
}
return headers
} catch (error) {
console.error("error while injecting cookie")
}
}
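A minimal usage sketch, assuming the `access_token` / `refresh_token` entries used above have already been persisted to the Tauri store:

```ts
const store = new Store(APP_DATA_PATH)

const headersWithCookies = await addCookieToFetchHeaders(store, {
  "Content-Type": "application/json",
})
// When both tokens exist, the returned headers carry
// Cookie: "access_token=<...>; refresh_token=<...>"
```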
function createHttpSource(operation: Operation, store: Store) {
return make(({ next, complete }) => {
getClient().then(async (httpClient) => {
const fetchOptions = makeFetchOptions(operation)
let headers = fetchOptions.headers
headers = await addCookieToFetchHeaders(store, headers)
const fetchBody = makeFetchBody(operation)
httpClient
.post(operation.context.url, Body.json(fetchBody), {
headers,
})
.then((result) => {
next(result.data)
complete()
})
.catch((error) => {
next(makeErrorResult(operation, error))
complete()
})
})
return () => {}
})
}
export const tauriGQLFetchExchange =
(store: Store): Exchange =>
({ forward }) =>
(ops$) => {
const subscriptionResults$ = pipe(
ops$,
filter((op) => op.kind === "query" || op.kind === "mutation"),
mergeMap((operation) => {
const { key, context } = operation
const teardown$ = pipe(
ops$,
filter((op: Operation) => op.kind === "teardown" && op.key === key)
)
const source = createHttpSource(operation, store)
return pipe(
source,
takeUntil(teardown$),
map((result) => makeResult(operation, result as any))
)
})
)
const forward$ = pipe(
ops$,
filter(
(op: Operation) => op.kind === "teardown" || op.kind != "subscription"
),
forward
)
return merge([subscriptionResults$, forward$])
}
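For orientation, a hedged sketch of wiring this exchange into a urql client; in the desktop app the exchange list actually comes from `platform.auth.getGQLClientOptions()` (see the auth module further below), and the endpoint variable name here is an assumption:

```ts
import { createClient, subscriptionExchange } from "@urql/core"

const client = createClient({
  url: import.meta.env.VITE_BACKEND_GQL_URL, // assumed env var name
  exchanges: [
    subscriptionExchange({
      // mirrors the forwardSubscription used in getGQLClientOptions()
      forwardSubscription: (op) => getSubscriptionClient()!.request(op),
    }),
    tauriGQLFetchExchange(new Store(APP_DATA_PATH)),
  ],
})
```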
const createSubscriptionClient = () => {
return new SubscriptionClient(
import.meta.env.VITE_BACKEND_WS_URL,
{
reconnect: true,
connectionParams: () => platform.auth.getBackendHeaders(),
connectionCallback(error) {
if (error?.length > 0) {
gqlClientError$.next({
type: "SUBSCRIPTION_CONN_CALLBACK_ERR_REPORT",
errors: error,
})
}
},
wsOptionArguments: [
{
store: new Store(APP_DATA_PATH),
},
],
},
WSWrapper
)
}
let subscriptionClient: SubscriptionClient | null
const isBackendGQLEventAdded = ref(false)
const resetSubscriptionClient = () => {
if (subscriptionClient) {
subscriptionClient.close()
}
subscriptionClient = createSubscriptionClient()
if (!isBackendGQLEventAdded.value) {
subscriptionClient.onConnected(() => {
platform.auth.onBackendGQLClientShouldReconnect(() => {
const currentUser = platform.auth.getCurrentUser()
if (currentUser && subscriptionClient) {
subscriptionClient?.client?.close()
}
if (currentUser && !subscriptionClient) {
resetSubscriptionClient()
}
if (!currentUser && subscriptionClient) {
subscriptionClient.close()
resetSubscriptionClient()
}
})
})
isBackendGQLEventAdded.value = true
}
}
export const getSubscriptionClient = () => {
if (!subscriptionClient) resetSubscriptionClient()
return subscriptionClient
}

View File

@@ -0,0 +1,154 @@
import { Store } from "tauri-plugin-store-api"
import TauriWebSocket, {
Message,
ConnectionConfig,
} from "tauri-plugin-websocket-api"
import { addCookieToFetchHeaders } from "./GQLClient"
/**
* This is a wrapper around tauri-plugin-websocket-api with cookie injection support. It is required because the
* subscriptions-transport-ws client expects a custom WebSocket implementation shaped like the native browser WebSocket.
*/
export default class WebSocketWrapper extends EventTarget implements WebSocket {
public client: TauriWebSocket | undefined
private tauriWebSocketConfig:
| (ConnectionConfig & { store: Store })
| undefined
private isConnected: boolean = false
binaryType: BinaryType = "blob"
extensions = ""
onclose: ((this: WebSocket, ev: CloseEvent) => any) | null = null
onerror: ((this: WebSocket, ev: Event) => any) | null = null
onmessage: ((this: WebSocket, ev: MessageEvent) => any) | null = null
onopen: ((this: WebSocket, ev: Event) => any) | null = null
protocol = ""
url: string
public static readonly CONNECTING = 0
public static readonly OPEN = 1
public static readonly CLOSING = 2
public static readonly CLOSED = 3
readonly CONNECTING = 0
readonly OPEN = 1
readonly CLOSING = 2
readonly CLOSED = 3
constructor(
url: string,
protocols?: string | string[],
config?: ConnectionConfig & { store: Store }
) {
super()
this.url = url
this.tauriWebSocketConfig = config
this.setup()
}
private async setup() {
if (this.tauriWebSocketConfig?.store) {
const headersStringified =
this.tauriWebSocketConfig.headers || ("{}" as any)
let headers = JSON.parse(headersStringified)
headers = await addCookieToFetchHeaders(
this.tauriWebSocketConfig.store,
headers
)
this.tauriWebSocketConfig = {
...this.tauriWebSocketConfig,
headers,
}
}
this.client = await TauriWebSocket.connect(this.url, {
headers: {
"sec-websocket-protocol": "graphql-ws",
...this.tauriWebSocketConfig?.headers,
},
}).catch((error) => {
this.isConnected = false
if (this.onerror) {
this.onerror(new Event("error"))
}
throw new Error(error)
})
this.isConnected = true
this.client.addListener(this.handleMessage.bind(this))
if (this.onopen) {
this.onopen(new Event("open"))
}
}
get readyState(): number {
return this.client
? this.isConnected
? this.OPEN
: this.CLOSED
: this.CONNECTING
}
get bufferedAmount(): number {
// TODO implement
return 0
}
close(code?: number, reason?: string): void {
this.client?.disconnect().then(() => {
if (this.onclose) {
this.onclose(new CloseEvent("close"))
}
})
}
send(data: string | ArrayBufferLike | Blob | ArrayBufferView) {
if (
typeof data === "string" ||
data instanceof ArrayBuffer ||
data instanceof Blob
) {
this.client?.send(data as string).catch((error) => {
console.error("error while sending data", data)
if (this.onerror) {
this.onerror(new Event("error"))
}
})
} else {
// TODO implement, drop the record for now
console.warn(
"WebSocketWrapper.send() not implemented for non-string data"
)
}
}
private handleMessage(message: Message): void {
switch (message.type) {
case "Close": {
if (this.onclose) {
this.onclose(new CloseEvent("close"))
}
return
}
case "Ping": {
this.client?.send("Pong").catch((error) => {
console.error("error while sending Pong data", message)
if (this.onerror) {
this.onerror(new Event("error"))
}
})
return
}
default: {
if (this.onmessage) {
this.onmessage(
new MessageEvent("message", {
data: message.data,
origin: this.url,
})
)
}
}
}
}
}
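A direct-usage sketch of the wrapper; in practice it is handed to `SubscriptionClient` (see `GQLClient.ts`), and the URL and store path below are placeholders:

```ts
import { Store } from "tauri-plugin-store-api"

const ws = new WebSocketWrapper("wss://backend.example.com/graphql", undefined, {
  store: new Store("~/.hopp-desktop-app-data.dat"),
})

// Illustrative graphql-over-websocket handshake once the connection opens
ws.onopen = () => ws.send(JSON.stringify({ type: "connection_init", payload: {} }))
ws.onmessage = (ev) => console.log("GraphQL WS frame:", ev.data)
```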

View File

@@ -1,4 +1,5 @@
import { getService } from "@hoppscotch/common/modules/dioc"
import axios from "axios"
import {
AuthEvent,
AuthPlatformDef,
@@ -13,15 +14,50 @@ import { open } from '@tauri-apps/api/shell'
import { BehaviorSubject, Subject } from "rxjs"
import { Store } from "tauri-plugin-store-api"
import { Ref, ref, watch } from "vue"
import { z } from "zod"
import * as E from "fp-ts/Either"
import { subscriptionExchange } from "@urql/core"
import {
getSubscriptionClient,
tauriGQLFetchExchange,
} from "../helpers/GQLClient"
export const authEvents$ = new Subject<AuthEvent | { event: "token_refresh" }>()
const currentUser$ = new BehaviorSubject<HoppUser | null>(null)
export const probableUser$ = new BehaviorSubject<HoppUser | null>(null)
const APP_DATA_PATH = "~/.hopp-desktop-app-data.dat"
export const APP_DATA_PATH = "~/.hopp-desktop-app-data.dat"
const persistenceService = getService(PersistenceService)
const expectedAllowedProvidersSchema = z.object({
// currently supported values are "GOOGLE", "GITHUB", "EMAIL", "MICROSOFT", "SAML"
// kept as plain strings so the backend adding new providers doesn't accidentally break the frontend
providers: z.array(z.string()),
})
export const getAllowedAuthProviders = async () => {
try {
const res = await axios.get(
`${import.meta.env.VITE_BACKEND_API_URL}/auth/providers`,
{
withCredentials: true,
}
)
const parseResult = expectedAllowedProvidersSchema.safeParse(res.data)
if (!parseResult.success) {
return E.left("SOMETHING_WENT_WRONG")
}
return E.right(parseResult.data.providers)
} catch (_) {
return E.left("SOMETHING_WENT_WRONG")
}
}
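A small consumer sketch using fp-ts's `Either` helpers (already imported in this module as `E`); the logging is illustrative:

```ts
const providersResult = await getAllowedAuthProviders()

if (E.isRight(providersResult)) {
  console.log("Enabled providers:", providersResult.right) // e.g. ["GITHUB", "EMAIL"]
} else {
  console.error("Could not fetch providers:", providersResult.left) // "SOMETHING_WENT_WRONG"
}
```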
async function logout() {
let client = await getClient();
await client.get(`${import.meta.env.VITE_BACKEND_API_URL}/auth/logout`)
@@ -37,7 +73,7 @@ async function signInUserWithGithubFB() {
}
async function signInUserWithGoogleFB() {
await open(`${import.meta.env.VITE_BACKEND_API_URL}/auth/google?redirect_uri=desktop`);
await open(`${import.meta.env.VITE_BACKEND_LOGIN_API_URL}/authenticate`);
}
async function signInUserWithMicrosoftFB() {
@@ -224,6 +260,15 @@ export const def: AuthPlatformDef = {
},
getGQLClientOptions() {
return {
exchanges: [
subscriptionExchange({
forwardSubscription(fetchBody) {
const subscriptionClient = getSubscriptionClient()
return subscriptionClient!.request(fetchBody)
},
}),
tauriGQLFetchExchange(new Store(APP_DATA_PATH)),
],
fetchOptions: {
credentials: "include",
},
@@ -371,4 +416,5 @@ export const def: AuthPlatformDef = {
event: "logout",
})
},
getAllowedAuthProviders,
}