Merge pull request #4 from aabril/feat/database_and_feed_model_finish
02 - 2nd part - feat/database and feed model
src/__tests__/Feed.model.test.ts (new file, 114 lines)
@@ -0,0 +1,114 @@
import mongoose from 'mongoose';
import { Feed, IFeedDocument } from '../models/Feed.js';
import { NewsSource } from '../types/Feed.js';

describe('Feed Model', () => {
  const mockFeedData = {
    title: 'Test News Title',
    description: 'Test news description',
    url: 'https://example.com/news/1',
    source: NewsSource.EL_PAIS,
    publishedAt: new Date('2024-01-15T10:00:00Z'),
    imageUrl: 'https://example.com/image.jpg',
    category: 'Politics',
    isManual: false
  };

  beforeEach(() => {
    jest.clearAllMocks();
  });

  describe('Schema Validation', () => {
    test('should create a valid feed document', () => {
      const feed = new Feed(mockFeedData);

      expect(feed.title).toBe(mockFeedData.title);
      expect(feed.description).toBe(mockFeedData.description);
      expect(feed.url).toBe(mockFeedData.url);
      expect(feed.source).toBe(mockFeedData.source);
      expect(feed.publishedAt).toEqual(mockFeedData.publishedAt);
      expect(feed.imageUrl).toBe(mockFeedData.imageUrl);
      expect(feed.category).toBe(mockFeedData.category);
      expect(feed.isManual).toBe(mockFeedData.isManual);
    });

    test('should set default values correctly', () => {
      const minimalData = {
        title: 'Test Title',
        description: 'Test description',
        url: 'https://example.com/test',
        source: NewsSource.EL_MUNDO,
        publishedAt: new Date()
      };

      const feed = new Feed(minimalData);

      expect(feed.isManual).toBe(false);
      expect(feed.createdAt).toBeDefined();
      expect(feed.updatedAt).toBeDefined();
    });

    test('should require mandatory fields', () => {
      const feed = new Feed({});
      const validationError = feed.validateSync();

      expect(validationError?.errors.title).toBeDefined();
      expect(validationError?.errors.url).toBeDefined();
      expect(validationError?.errors.source).toBeDefined();
      expect(validationError?.errors.publishedAt).toBeDefined();
    });

    test('should validate NewsSource enum', () => {
      const invalidData = {
        ...mockFeedData,
        source: 'Invalid Source' as any
      };

      const feed = new Feed(invalidData);
      const validationError = feed.validateSync();

      expect(validationError?.errors.source).toBeDefined();
    });
  });

  describe('Document Transformation', () => {
    test('should transform _id to id in JSON', () => {
      const feed = new Feed(mockFeedData);
      feed._id = '507f1f77bcf86cd799439011';

      const json = feed.toJSON();

      expect(json.id.toString()).toBe('507f1f77bcf86cd799439011');
      expect(json._id).toBeUndefined();
      expect(json.__v).toBeUndefined();
    });

    test('should transform _id to id in Object', () => {
      const feed = new Feed(mockFeedData);
      feed._id = '507f1f77bcf86cd799439011';

      const obj = feed.toObject();

      expect(obj.id.toString()).toBe('507f1f77bcf86cd799439011');
      expect(obj._id).toBeUndefined();
      expect(obj.__v).toBeUndefined();
    });
  });

  describe('Model Methods', () => {
    test('should create Feed model instance', () => {
      expect(Feed).toBeDefined();
      expect(Feed.modelName).toBe('Feed');
    });

    test('should have unique URL index', () => {
      const indexes = Feed.schema.indexes();
      const urlIndex = indexes.find(index =>
        index[0].url && index[1].unique
      );

      expect(urlIndex).toBeDefined();
      expect(urlIndex?.[1].unique).toBe(true);
    });
  });
});
src/__tests__/FeedService.test.ts (new file, 237 lines)
@@ -0,0 +1,237 @@
import { FeedService } from '../services/FeedService.js';
import { IFeedRepository } from '../repositories/FeedRepository.js';
import { NewsSource, IFeed, ICreateFeedDto, IUpdateFeedDto, IFeedQuery, IPaginatedResponse } from '../types/Feed.js';

// Mock FeedRepository
const mockFeedRepository: jest.Mocked<IFeedRepository> = {
  create: jest.fn(),
  findById: jest.fn(),
  findByUrl: jest.fn(),
  findAll: jest.fn(),
  findBySource: jest.fn(),
  findTodaysFrontPage: jest.fn(),
  update: jest.fn(),
  delete: jest.fn(),
  deleteMany: jest.fn(),
  count: jest.fn(),
  exists: jest.fn()
};

describe('FeedService', () => {
  let feedService: FeedService;

  const mockFeed: IFeed = {
    _id: '507f1f77bcf86cd799439011',
    title: 'Test News Title',
    description: 'Test news description',
    url: 'https://example.com/news/1',
    source: NewsSource.EL_PAIS,
    publishedAt: new Date('2024-01-15T10:00:00Z'),
    imageUrl: 'https://example.com/image.jpg',
    category: 'Politics',
    isManual: false,
    createdAt: new Date('2024-01-15T10:00:00Z'),
    updatedAt: new Date('2024-01-15T10:00:00Z')
  };

  const mockCreateFeedDto: ICreateFeedDto = {
    title: 'New Test News',
    description: 'New test news description',
    url: 'https://example.com/news/new',
    source: NewsSource.EL_MUNDO,
    publishedAt: new Date('2024-01-15T12:00:00Z'),
    imageUrl: 'https://example.com/new-image.jpg',
    category: 'Sports'
  };

  beforeEach(() => {
    jest.clearAllMocks();
    feedService = new FeedService(mockFeedRepository);
  });

  describe('createFeed', () => {
    test('should create a feed successfully', async () => {
      mockFeedRepository.findByUrl.mockResolvedValueOnce(null);
      mockFeedRepository.create.mockResolvedValueOnce(mockFeed);

      const result = await feedService.createFeed(mockCreateFeedDto);

      expect(mockFeedRepository.findByUrl).toHaveBeenCalledWith(mockCreateFeedDto.url);
      expect(mockFeedRepository.create).toHaveBeenCalledWith(mockCreateFeedDto);
      expect(result).toEqual(mockFeed);
    });

    test('should throw error if URL already exists', async () => {
      mockFeedRepository.findByUrl.mockResolvedValueOnce(mockFeed);

      await expect(feedService.createFeed(mockCreateFeedDto))
        .rejects.toThrow('A feed with this URL already exists');

      expect(mockFeedRepository.create).not.toHaveBeenCalled();
    });
  });

  describe('getFeedById', () => {
    test('should return feed by ID', async () => {
      mockFeedRepository.findById.mockResolvedValueOnce(mockFeed);

      const result = await feedService.getFeedById('507f1f77bcf86cd799439011');

      expect(mockFeedRepository.findById).toHaveBeenCalledWith('507f1f77bcf86cd799439011');
      expect(result).toEqual(mockFeed);
    });

    test('should return null for non-existent feed', async () => {
      mockFeedRepository.findById.mockResolvedValueOnce(null);

      const result = await feedService.getFeedById('507f1f77bcf86cd799439011');

      expect(result).toBeNull();
    });
  });

  describe('getAllFeeds', () => {
    test('should return paginated feeds with default values', async () => {
      const mockResponse: IPaginatedResponse<IFeed> = {
        data: [mockFeed],
        pagination: {
          page: 1,
          limit: 20,
          total: 1,
          totalPages: 1,
          hasNext: false,
          hasPrev: false
        }
      };

      mockFeedRepository.findAll.mockResolvedValueOnce(mockResponse);

      const result = await feedService.getAllFeeds();

      expect(mockFeedRepository.findAll).toHaveBeenCalledWith({
        limit: 20,
        page: 1
      });
      expect(result).toEqual(mockResponse);
    });

    test('should pass custom query parameters', async () => {
      const query: IFeedQuery = {
        source: NewsSource.EL_PAIS,
        limit: 10,
        page: 2
      };

      const mockResponse: IPaginatedResponse<IFeed> = {
        data: [mockFeed],
        pagination: {
          page: 2,
          limit: 10,
          total: 1,
          totalPages: 1,
          hasNext: false,
          hasPrev: true
        }
      };

      mockFeedRepository.findAll.mockResolvedValueOnce(mockResponse);

      const result = await feedService.getAllFeeds(query);

      expect(mockFeedRepository.findAll).toHaveBeenCalledWith(query);
      expect(result).toEqual(mockResponse);
    });
  });

  describe('updateFeed', () => {
    const updateData: IUpdateFeedDto = {
      title: 'Updated Title',
      description: 'Updated description'
    };

    test('should update feed successfully', async () => {
      const updatedFeed = { ...mockFeed, ...updateData };
      mockFeedRepository.update.mockResolvedValueOnce(updatedFeed);

      const result = await feedService.updateFeed('507f1f77bcf86cd799439011', updateData);

      expect(mockFeedRepository.update).toHaveBeenCalledWith('507f1f77bcf86cd799439011', updateData);
      expect(result).toEqual(updatedFeed);
    });

    test('should check URL conflicts when updating URL', async () => {
      const updateWithUrl: IUpdateFeedDto = {
        ...updateData,
        url: 'https://example.com/new-url'
      };

      const existingFeed = { ...mockFeed, _id: 'different-id' };
      mockFeedRepository.findByUrl.mockResolvedValueOnce(existingFeed);

      await expect(feedService.updateFeed('507f1f77bcf86cd799439011', updateWithUrl))
        .rejects.toThrow('A feed with this URL already exists');

      expect(mockFeedRepository.update).not.toHaveBeenCalled();
    });
  });

  describe('deleteFeed', () => {
    test('should delete feed successfully', async () => {
      mockFeedRepository.delete.mockResolvedValueOnce(true);

      const result = await feedService.deleteFeed('507f1f77bcf86cd799439011');

      expect(mockFeedRepository.delete).toHaveBeenCalledWith('507f1f77bcf86cd799439011');
      expect(result).toBe(true);
    });
  });

  describe('getTodaysFrontPageNews', () => {
    test('should return combined feeds from all sources', async () => {
      const elPaisFeeds = [{ ...mockFeed, source: NewsSource.EL_PAIS }];
      const elMundoFeeds = [{ ...mockFeed, source: NewsSource.EL_MUNDO }];

      mockFeedRepository.findTodaysFrontPage
        .mockResolvedValueOnce(elPaisFeeds)
        .mockResolvedValueOnce(elMundoFeeds);

      const result = await feedService.getTodaysFrontPageNews();

      expect(mockFeedRepository.findTodaysFrontPage).toHaveBeenCalledWith(NewsSource.EL_PAIS);
      expect(mockFeedRepository.findTodaysFrontPage).toHaveBeenCalledWith(NewsSource.EL_MUNDO);
      expect(result).toEqual([...elPaisFeeds, ...elMundoFeeds]);
    });

    test('should return empty array if no feeds found', async () => {
      mockFeedRepository.findTodaysFrontPage
        .mockResolvedValueOnce([])
        .mockResolvedValueOnce([]);

      const result = await feedService.getTodaysFrontPageNews();

      expect(result).toEqual([]);
    });
  });

  describe('getFeedsBySource', () => {
    test('should return feeds by source', async () => {
      const sourceFeeds = [mockFeed];
      mockFeedRepository.findBySource.mockResolvedValueOnce(sourceFeeds);

      const result = await feedService.getFeedsBySource(NewsSource.EL_PAIS);

      expect(mockFeedRepository.findBySource).toHaveBeenCalledWith(NewsSource.EL_PAIS);
      expect(result).toEqual(sourceFeeds);
    });

    test('should accept an explicit limit without forwarding it to the repository', async () => {
      const sourceFeeds = [mockFeed];
      mockFeedRepository.findBySource.mockResolvedValueOnce(sourceFeeds);

      const result = await feedService.getFeedsBySource(NewsSource.EL_PAIS, 5);

      expect(mockFeedRepository.findBySource).toHaveBeenCalledWith(NewsSource.EL_PAIS);
      expect(result).toEqual(sourceFeeds);
    });
  });
});
src/models/Feed.ts
@@ -1,31 +1,85 @@
import mongoose, { Schema, Document } from 'mongoose';
import { IFeed } from '../types/Feed.js';
import { IFeed, NewsSource } from '../types/Feed.js';

export interface IFeedDocument extends IFeed, Document {
  _id: string;
  _id: string;
}

const feedSchema = new Schema<IFeedDocument>({
}, {
  timestamps: true,
  toJSON: {
    transform: function(doc, ret) {
      ret.id = ret._id;
      delete (ret as any)._id;
      delete (ret as any).__v;
      return ret;
    }
  },
  toObject: {
    transform: function(doc, ret) {
      ret.id = ret._id;
      delete (ret as any)._id;
      delete (ret as any).__v;
      return ret;
    }
  }
});
const feedSchemaObject = {
  title: {
    type: String,
    required: true
  },

  description: {
    type: String
  },

  url: {
    type: String,
    required: true
  },

  imageUrl: {
    type: String
  },

  source: {
    type: String,
    required: true,
    enum: Object.values(NewsSource)
  },

  category: {
    type: String
  },

  publishedAt: {
    type: Date,
    required: true
  },

  createdAt: {
    type: Date,
    default: Date.now
  },

  updatedAt: {
    type: Date,
    default: Date.now
  },

  isManual: {
    type: Boolean,
    default: false
  }
} as const;

const feedSchemaSettings = {
  timestamps: { createdAt: 'createdAt', updatedAt: 'updatedAt' },

  toJSON: {
    transform: (doc: any, ret: any) => {
      ret.id = ret._id;
      delete ret._id;
      delete ret.__v;
      return ret;
    }
  },

  toObject: {
    transform: (doc: any, ret: any) => {
      ret.id = ret._id;
      delete ret._id;
      delete ret.__v;
      return ret;
    }
  }
} as const;

const feedSchema = new Schema<IFeedDocument>(feedSchemaObject as any, feedSchemaSettings);

feedSchema.index({ url: 1 }, { unique: true });

export const Feed = mongoose.model<IFeedDocument>('Feed', feedSchema);
export default Feed;
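Reviewer note: the serialization behaviour is easiest to see in isolation. A minimal sketch (illustrative only; it assumes the model compiled from this diff and uses relative import paths the way the test files do, with no database connection required just to construct a document):

import { Feed } from '../models/Feed.js';
import { NewsSource } from '../types/Feed.js';

// Constructing a document does not need a connection, so the transform
// can be inspected directly, as the tests above also do.
const feed = new Feed({
  title: 'Front page headline',
  description: 'Example entry',
  url: 'https://example.com/news/demo',
  source: NewsSource.EL_PAIS,
  publishedAt: new Date()
});

const json = feed.toJSON() as any;
console.log(json.id);              // ObjectId generated by Mongoose at construction
console.log(json._id, json.__v);   // both undefined: stripped by the toJSON transform
console.log(feed.isManual);        // false, from the schema default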
src/repositories/FeedRepository.ts
@@ -1,2 +1,147 @@
// Here lives the "repository" that talks to the model.
// The usual operations: find, findById, createOne, etc.
import { Feed, IFeedDocument } from '../models/Feed.js';
import { IFeed, ICreateFeedDto, IUpdateFeedDto, IFeedQuery, IPaginatedResponse, NewsSource } from '../types/Feed.js';
import { FilterQuery, UpdateQuery } from 'mongoose';

export interface IFeedRepository {
  create(feedData: ICreateFeedDto): Promise<IFeed>;
  findById(id: string): Promise<IFeed | null>;
  findByUrl(url: string): Promise<IFeed | null>;
  findAll(query: IFeedQuery): Promise<IPaginatedResponse<IFeed>>;
  findBySource(source: NewsSource): Promise<IFeed[]>;
  findTodaysFrontPage(source: NewsSource): Promise<IFeed[]>;
  update(id: string, updateData: IUpdateFeedDto): Promise<IFeed | null>;
  delete(id: string): Promise<boolean>;
  deleteMany(filter: FilterQuery<IFeedDocument>): Promise<number>;
  count(filter?: FilterQuery<IFeedDocument>): Promise<number>;
  exists(url: string): Promise<boolean>;
}

export class FeedRepository implements IFeedRepository {
  async create(feedData: ICreateFeedDto): Promise<IFeed> {
    const feed = new Feed({
      ...feedData,
      publishedAt: feedData.publishedAt || new Date(),
      isManual: feedData.isManual ?? false
    });

    const savedFeed = await feed.save();
    return savedFeed.toObject();
  }

  async findById(id: string): Promise<IFeed | null> {
    const feed = await Feed.findById(id).lean();
    return feed ? this.transformDocument(feed) : null;
  }

  async findByUrl(url: string): Promise<IFeed | null> {
    const feed = await Feed.findOne({ url }).lean();
    return feed ? this.transformDocument(feed) : null;
  }

  async findAll(query: IFeedQuery): Promise<IPaginatedResponse<IFeed>> {
    const {
      source,
      page = 1,
      limit = 20
    } = query;

    const filter: FilterQuery<IFeedDocument> = {};

    if (source) filter.source = source;

    const skip = (page - 1) * limit;

    const [feeds, total] = await Promise.all([
      Feed.find(filter)
        .sort({ publishedAt: -1 })
        .skip(skip)
        .limit(limit)
        .lean(),
      Feed.countDocuments(filter)
    ]);

    const totalPages = Math.ceil(total / limit);

    return {
      data: feeds.map(feed => this.transformDocument(feed)),
      pagination: {
        page,
        limit,
        total,
        totalPages,
        hasNext: page < totalPages,
        hasPrev: page > 1
      }
    };
  }

  async findBySource(source: NewsSource): Promise<IFeed[]> {
    const feeds = await Feed.find({ source }).sort({ publishedAt: -1 }).lean();
    return feeds.map((feed: any) => this.transformDocument(feed));
  }

  async findTodaysFrontPage(source: NewsSource): Promise<IFeed[]> {
    const today = new Date();
    today.setHours(0, 0, 0, 0);
    const tomorrow = new Date(today);
    tomorrow.setDate(tomorrow.getDate() + 1);

    const feeds = await Feed.find({
      source,
      isManual: false,
      publishedAt: {
        $gte: today,
        $lt: tomorrow
      }
    }).sort({ publishedAt: -1 }).limit(10).lean();

    return feeds.map((feed: any) => this.transformDocument(feed));
  }

  async update(id: string, updateData: IUpdateFeedDto): Promise<IFeed | null> {
    const updatedFeed = await Feed.findByIdAndUpdate(
      id,
      updateData,
      { new: true, runValidators: true }
    ).lean();

    return updatedFeed ? this.transformDocument(updatedFeed) : null;
  }

  async delete(id: string): Promise<boolean> {
    const result = await Feed.findByIdAndDelete(id);
    return !!result;
  }

  async deleteMany(filter: FilterQuery<IFeedDocument>): Promise<number> {
    const result = await Feed.deleteMany(filter);
    return result.deletedCount || 0;
  }

  async count(filter: FilterQuery<IFeedDocument> = {}): Promise<number> {
    return await Feed.countDocuments(filter);
  }

  async exists(url: string): Promise<boolean> {
    const feed = await Feed.findOne({ url }).select('_id').lean();
    return !!feed;
  }

  private transformDocument(doc: any): IFeed {
    return {
      _id: doc._id.toString(),
      title: doc.title,
      description: doc.description,
      url: doc.url,
      source: doc.source,
      publishedAt: doc.publishedAt,
      imageUrl: doc.imageUrl,
      category: doc.category,
      isManual: doc.isManual,
      createdAt: doc.createdAt,
      updatedAt: doc.updatedAt
    };
  }
}

export default FeedRepository;
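The pagination arithmetic inside findAll is the piece most worth sanity-checking. This self-contained sketch mirrors the same calculation with illustrative names (paginate and PageInfo are not part of the PR):

// Mirrors the skip/totalPages/hasNext/hasPrev arithmetic used in findAll.
interface PageInfo {
  page: number;
  limit: number;
  total: number;
  totalPages: number;
  hasNext: boolean;
  hasPrev: boolean;
  skip: number;
}

function paginate(total: number, page: number = 1, limit: number = 20): PageInfo {
  const skip = (page - 1) * limit;           // documents skipped before this page
  const totalPages = Math.ceil(total / limit);
  return {
    page,
    limit,
    total,
    totalPages,
    hasNext: page < totalPages,              // a later page exists
    hasPrev: page > 1,                       // an earlier page exists
    skip
  };
}

// 45 matching feeds, page 2, 20 per page:
// { page: 2, limit: 20, total: 45, totalPages: 3, hasNext: true, hasPrev: true, skip: 20 }
console.log(paginate(45, 2, 20));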
src/services/FeedService.ts (new file, 63 lines)
@@ -0,0 +1,63 @@
import { IFeedRepository } from '../repositories/FeedRepository.js';
import { IFeed, ICreateFeedDto, IUpdateFeedDto, IFeedQuery, IPaginatedResponse, NewsSource } from '../types/Feed.js';

export interface IFeedService {
  createFeed(feedData: ICreateFeedDto): Promise<IFeed>;
  getFeedById(id: string): Promise<IFeed | null>;
  getAllFeeds(query?: IFeedQuery): Promise<IPaginatedResponse<IFeed>>;
  updateFeed(id: string, updateData: IUpdateFeedDto): Promise<IFeed | null>;
  deleteFeed(id: string): Promise<boolean>;
  getTodaysFrontPageNews(): Promise<IFeed[]>;
  getFeedsBySource(source: NewsSource, limit?: number): Promise<IFeed[]>;
}

export class FeedService implements IFeedService {
  constructor(private feedRepository: IFeedRepository) {}

  async createFeed(feedData: ICreateFeedDto): Promise<IFeed> {
    const existingFeed = await this.feedRepository.findByUrl(feedData.url);
    if (existingFeed) {
      throw new Error('A feed with this URL already exists');
    }
    return await this.feedRepository.create(feedData);
  }

  async getFeedById(id: string): Promise<IFeed | null> {
    return await this.feedRepository.findById(id);
  }

  async getAllFeeds(query: IFeedQuery = {}): Promise<IPaginatedResponse<IFeed>> {
    const sanitizedQuery = {
      ...query,
      limit: query.limit || 20,
      page: query.page || 1
    };
    return await this.feedRepository.findAll(sanitizedQuery);
  }

  async updateFeed(id: string, updateData: IUpdateFeedDto): Promise<IFeed | null> {
    if (updateData.url) {
      const existingFeed = await this.feedRepository.findByUrl(updateData.url);
      if (existingFeed && existingFeed._id !== id) {
        throw new Error('A feed with this URL already exists');
      }
    }
    return await this.feedRepository.update(id, updateData);
  }

  async deleteFeed(id: string): Promise<boolean> {
    return await this.feedRepository.delete(id);
  }

  async getTodaysFrontPageNews(): Promise<IFeed[]> {
    const [elPaisFeeds, elMundoFeeds] = await Promise.all([
      this.feedRepository.findTodaysFrontPage(NewsSource.EL_PAIS),
      this.feedRepository.findTodaysFrontPage(NewsSource.EL_MUNDO)
    ]);
    return [...elPaisFeeds, ...elMundoFeeds];
  }

  async getFeedsBySource(source: NewsSource, limit: number = 10): Promise<IFeed[]> {
    return await this.feedRepository.findBySource(source);
  }
}
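For context, a minimal wiring sketch showing how the service and repository compose (illustrative; import paths are placeholders and it assumes a Mongoose connection has already been established elsewhere, which this diff does not include):

import { FeedRepository } from '../repositories/FeedRepository.js';
import { FeedService } from '../services/FeedService.js';
import { NewsSource } from '../types/Feed.js';

async function example(): Promise<void> {
  // The service only depends on the IFeedRepository interface, which is what
  // lets the tests above swap in a jest-mocked repository.
  const feedService = new FeedService(new FeedRepository());

  const created = await feedService.createFeed({
    title: 'Front page headline',
    description: 'Example entry',
    url: 'https://example.com/news/demo',
    source: NewsSource.EL_MUNDO
  });

  // Combines today's non-manual items from El País and El Mundo.
  const frontPage = await feedService.getTodaysFrontPageNews();

  console.log(created._id, frontPage.length);
}

example().catch(console.error);

Worth noting for review: getFeedsBySource accepts a limit argument but never forwards it, since IFeedRepository.findBySource only takes the source.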
src/types/Feed.ts
@@ -2,9 +2,9 @@ export enum NewsSource {
  EL_PAIS = 'El País',
  EL_MUNDO = 'El Mundo',
  MANUAL = 'Manual'
}

export interface IFeed {
}

export interface IFeed {
  _id?: string;
  title: string;
  description: string;
@@ -13,6 +13,55 @@ export interface IFeed {
  publishedAt: Date;
  imageUrl?: string;
  category?: string;
  isManual: boolean;
  createdAt?: Date;
  updatedAt?: Date;
}
}

export interface ICreateFeedDto {
  title: string;
  description: string;
  url: string;
  source: NewsSource;
  publishedAt?: Date;
  imageUrl?: string;
  category?: string;
  isManual?: boolean;
}

export interface IUpdateFeedDto {
  title?: string;
  description?: string;
  url?: string;
  source?: NewsSource;
  publishedAt?: Date;
  imageUrl?: string;
  category?: string;
  isManual?: boolean;
}

export interface IFeedQuery {
  source?: NewsSource;
  page?: number;
  limit?: number;
}

export interface IPaginatedResponse<T> {
  data: T[];
  pagination: {
    page: number;
    limit: number;
    total: number;
    totalPages: number;
    hasNext: boolean;
    hasPrev: boolean;
  };
}

export interface IScrapedNews {
  title: string;
  description: string;
  url: string;
  imageUrl?: string;
  category?: string;
}