chore(seaside): #185 fix regexp backtracking vulnerability
Marthym committed Nov 19, 2023
1 parent 0f11945 commit 0aafe9e
Showing 3 changed files with 31 additions and 41 deletions.
@@ -100,7 +100,7 @@ import UserRoleInput from "@/administration/component/usereditor/UserRoleInput.v
const CANCEL_EVENT: string = 'cancel';
const SUBMIT_EVENT: string = 'submit';
const CHANGE_EVENT: string = 'change';
-const MAIL_PATTERN = /^\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,3})+$/;
+const MAIL_PATTERN = /^[a-zA-Z0-9_+&*-]+(?:\.[a-zA-Z0-9_+&*-]+)*@(?:[a-zA-Z0-9-]+\.)+[a-zA-Z]{2,7}$/;
const ULID_PATTERN = /^[A-Z]*(:[A-Z]{2}[0-7][0-9A-HJKMNP-TV-Z]{25})?$/;
@Component({
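The replaced MAIL_PATTERN above is the classic backtracking-prone e-mail regexp: the nested quantifier `\w+([.-]?\w+)*` can try exponentially many ways to split a long run of word characters before failing. Below is a minimal sketch of the difference; it is not part of the commit, and the input length and timing helper are illustrative assumptions.

```typescript
// Illustrative sketch only; not part of this commit.
const OLD_MAIL_PATTERN = /^\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,3})+$/;
const NEW_MAIL_PATTERN = /^[a-zA-Z0-9_+&*-]+(?:\.[a-zA-Z0-9_+&*-]+)*@(?:[a-zA-Z0-9-]+\.)+[a-zA-Z]{2,7}$/;

// A run of word characters with no '@' can never match, but the old pattern
// still tries every split of the run between `\w+` and `([.-]?\w+)*` before
// giving up; each extra character roughly doubles the work.
const hostile = 'a'.repeat(30) + '!';

function timeTest(pattern: RegExp, input: string): number {
    const start = Date.now();
    pattern.test(input);
    return Date.now() - start;
}

console.log(`old: ${timeTest(OLD_MAIL_PATTERN, hostile)} ms`); // can already take many seconds
console.log(`new: ${timeTest(NEW_MAIL_PATTERN, hostile)} ms`); // rejects almost instantly
```

The replacement requires a literal `.` before every repetition of its group, so each character of the local part can only be consumed one way and the engine fails fast.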
seaside/src/configuration/services/FeedService.ts (53 changes: 24 additions & 29 deletions)
@@ -1,20 +1,15 @@
-import {map, switchMap, take} from "rxjs/operators";
-import {HttpStatusError} from "@/common/errors/HttpStatusError";
-import {Feed} from "@/configuration/model/Feed.type";
-import {Page} from "@/services/model/Page";
-import {from, Observable, of, throwError} from "rxjs";
+import { map, switchMap, take } from 'rxjs/operators';
+import { HttpStatusError } from '@/common/errors/HttpStatusError';
+import { Feed } from '@/configuration/model/Feed.type';
+import { Page } from '@/services/model/Page';
+import { from, Observable, of, throwError } from 'rxjs';
import rest from '@/common/services/RestWrapper';
-import {OpPatch} from "json-patch";
-import {AtomFeed, ScrapFeedHeaderResponse} from "@/configuration/model/GraphQLScraper.type";
-import {SearchFeedsRequest, SearchFeedsResponse} from "@/configuration/model/SearchFeedsResponse.type";
-import {send} from "@/common/services/GraphQLClient";
-
-export const URL_PATTERN = new RegExp('^(https?:\\/\\/)?' + // protocol
-'((([a-z\\d]([a-z\\d-]*[a-z\\d])*)\\.)+[a-z]{2,}|' + // domain name
-'((\\d{1,3}\\.){3}\\d{1,3}))' + // OR ip (v4) address
-'(\\:\\d+)?(\\/[-a-z\\d%_.~+]*)*' + // port and path
-'(\\?[;&a-z\\d%_.~+=-]*)?' + // query string
-'(\\#[-a-z\\d_]*)?$', 'i'); // fragment locator
+import { OpPatch } from 'json-patch';
+import { AtomFeed, ScrapFeedHeaderResponse } from '@/configuration/model/GraphQLScraper.type';
+import { SearchFeedsRequest, SearchFeedsResponse } from '@/configuration/model/SearchFeedsResponse.type';
+import { send } from '@/common/services/GraphQLClient';
+
+export const URL_PATTERN = /^(((https?):\/\/)(%[0-9A-Fa-f]{2}|[-()_.!~*';/?:@&=+$,A-Za-z0-9])+)([).!';/?:,][[:blank:]])?$/;

export class FeedService {

@@ -26,7 +21,7 @@ export class FeedService {
totalCount
entities {_id name location tags}
}
-}`
+}`;

private static readonly SCRAP_FEED_HEAD_REQUEST = `#graphql
query ScrapFeedHeader($link: URI!) {
@@ -56,7 +51,7 @@ export class FeedService {
totalPage: Math.ceil(
res.data.feedsSearch.totalCount / (options._pp | FeedService.DEFAULT_PER_PAGE)),
data: of(res.data.feedsSearch.entities),
-}
+};
}),
take(1),
);
@@ -65,7 +60,7 @@
public add(feed: Feed): Observable<Feed> {
return rest.post('/feeds', feed).pipe(
switchMap(this.responseToFeed),
-take(1)
+take(1),
);
}

@@ -74,38 +69,38 @@
return rest.put(`/feeds/${feed._id}`, feed).pipe(
switchMap(this.responseToFeed),
map((updatedFeed: Feed) => updatedFeed._id),
-take(1)
+take(1),
);
} else {
const jsonPatch: OpPatch[] = [];
-jsonPatch.push({op: 'remove', path: `/feeds/${feed._id}`});
-jsonPatch.push({op: 'add', path: '/feeds', value: feed});
+jsonPatch.push({ op: 'remove', path: `/feeds/${feed._id}` });
+jsonPatch.push({ op: 'add', path: '/feeds', value: feed });

return this.patch(jsonPatch).pipe(
-map(updated => updated.pop())
+map(updated => updated.pop()),
);
}
}

public remove(id: string): Observable<Feed> {
return rest.delete(`/feeds/${id}`).pipe(
switchMap(this.responseToFeed),
-take(1)
+take(1),
);
}

public bulkRemove(ids: string[]): Observable<number> {
const jsonPatch: OpPatch[] = [];
-ids.forEach(id => jsonPatch.push({op: 'remove', path: `/feeds/${id}`}));
+ids.forEach(id => jsonPatch.push({ op: 'remove', path: `/feeds/${id}` }));
return this.patch(jsonPatch).pipe(
-map(deleted => deleted.length)
+map(deleted => deleted.length),
);
}

private patch(payload: OpPatch[]): Observable<string[]> {
return rest.patch('/feeds', payload).pipe(
switchMap(this.responseToFeed),
-take(1)
+take(1),
);
}

@@ -125,7 +120,7 @@ export class FeedService {
return throwError(() => new Error('Argument link must be a valid URL !'));
}

-return send<ScrapFeedHeaderResponse>(FeedService.SCRAP_FEED_HEAD_REQUEST, {link: link}).pipe(
+return send<ScrapFeedHeaderResponse>(FeedService.SCRAP_FEED_HEAD_REQUEST, { link: link }).pipe(
map(data => data.data.scrapFeedHeader),
map((atom: AtomFeed) => ({
name: atom.title,
@@ -140,7 +135,7 @@ export class FeedService {
return throwError(() => new Error('Feed id is mandatory !'));
}

-return send<{ subscribe: Feed }>(FeedService.FEED_SUBSCRIBE, {feedId: id}).pipe(
+return send<{ subscribe: Feed }>(FeedService.FEED_SUBSCRIBE, { feedId: id }).pipe(
map(data => data.data.subscribe),
take(1),
);
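The same one-line URL_PATTERN replaces the multi-line constructed regexp in ScraperService.ts below. A quick sanity check of how it behaves, not part of the commit; the sample inputs are illustrative and the `@/` import alias is assumed to resolve as it does elsewhere in the project.

```typescript
// Illustrative sketch only; not part of this commit.
// URL_PATTERN is the constant exported by FeedService.ts above.
import { URL_PATTERN } from '@/configuration/services/FeedService';

console.log(URL_PATTERN.test('https://example.org/feed.xml')); // true: http(s) plus allowed URL characters
console.log(URL_PATTERN.test('ftp://example.org/feed.xml'));   // false: the scheme must be http or https
console.log(URL_PATTERN.test('not a url'));                    // false: spaces are not in the allowed set
```

Unlike the removed pattern, each repetition here consumes input unambiguously (either a `%XX` escape or one plain character), so there is no nested quantifier left to backtrack over.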
seaside/src/layout/services/ScraperService.ts (17 changes: 6 additions & 11 deletions)
@@ -1,13 +1,8 @@
-import {Observable, throwError} from "rxjs";
-import {map, take} from "rxjs/operators";
-import {send} from "@/common/services/GraphQLClient";
+import { Observable, throwError } from 'rxjs';
+import { map, take } from 'rxjs/operators';
+import { send } from '@/common/services/GraphQLClient';

-const URL_PATTERN = new RegExp('^(https?:\\/\\/)?' + // protocol
-'((([a-z\\d]([a-z\\d-]*[a-z\\d])*)\\.)+[a-z]{2,}|' + // domain name
-'((\\d{1,3}\\.){3}\\d{1,3}))' + // OR ip (v4) address
-'(\\:\\d+)?(\\/[-a-z\\d%_.~+@]*)*' + // port and path
-'(\\?[;&a-z\\d%_.~+=-]*)?' + // query string
-'(\\#[-a-z\\d_]*)?$', 'i'); // fragment locator
+const URL_PATTERN = /^(((https?):\/\/)(%[0-9A-Fa-f]{2}|[-()_.!~*';/?:@&=+$,A-Za-z0-9])+)([).!';/?:,][[:blank:]])?$/;

export class ScraperService {
private static readonly SCRAP_SINGLE_NEWS_REQUEST = `#graphql
@@ -22,9 +17,9 @@ export class ScraperService {
return throwError(() => new Error('Argument link must be a valid URL !'));
}

-return send(ScraperService.SCRAP_SINGLE_NEWS_REQUEST, {newsLink: link}).pipe(
+return send(ScraperService.SCRAP_SINGLE_NEWS_REQUEST, { newsLink: link }).pipe(
take(1),
-map(() => undefined)
+map(() => undefined),
);
}
}
