Merge branch 'develop'

Ethan Lane 2024-08-07 17:13:44 +01:00
commit da2e286d16
29 changed files with 1235 additions and 1287 deletions

View file

@ -1 +0,0 @@
dist/

View file

@ -1,50 +0,0 @@
{
"parserOptions": {
"ecmaVersion": 6
},
"extends": [
"eslint:recommended"
],
"rules": {
"camelcase": "error",
"brace-style": [
"error",
"1tbs"
],
"comma-dangle": [
"error",
"never"
],
"comma-spacing": [
"error",
{
"before": false,
"after": true
}
],
"comma-style": [
"error",
"last"
],
"arrow-body-style": [
"error",
"as-needed"
],
"arrow-parens": [
"error",
"as-needed"
],
"arrow-spacing": "error",
"no-var": "error",
"prefer-template": "error",
"prefer-const": "error"
},
"globals": {
"exports": "writable",
"module": "writable",
"require": "writable",
"process": "writable",
"console": "writable",
"jest": "writable"
}
}

View file

@ -0,0 +1,38 @@
name: Publish to NPM

on:
  push:
    tags:
      - '*'

jobs:
  build:
    runs-on: node
    steps:
      - uses: actions/checkout@v4
      - name: Use Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 20.x
      - run: yarn install --frozen-lockfile
      - run: yarn build
      - run: yarn test
      - run: yarn lint

  publish:
    needs: build
    runs-on: node
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 20.x
      - run: yarn install --frozen-lockfile
      - run: yarn build
      - run: yarn package
      - run: cp -r ./bin ${{ secrets.PROD_REPO_PATH }}
      - uses: https://github.com/JS-DevTools/npm-publish@v3
        with:
          token: ${{ secrets.NPM_TOKEN }}

View file

@ -0,0 +1,35 @@
name: Stage

on:
  push:
    branches:
      - develop

jobs:
  build:
    runs-on: node
    steps:
      - uses: actions/checkout@v4
      - name: Use Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 20.x
      - run: yarn install --frozen-lockfile
      - run: yarn build
      - run: yarn test
      - run: yarn lint

  rsync:
    needs: build
    runs-on: node
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 20.x
      - run: yarn install --frozen-lockfile
      - run: yarn build
      - run: yarn package
      - run: cp -r ./bin ${{ secrets.STAGE_REPO_PATH }}

View file

@ -0,0 +1,25 @@
name: Test

on:
  push:
    branches:
      - feature/*
      - hotfix/*
      - renovate/*
      - dependabot/*

jobs:
  build:
    runs-on: node
    steps:
      - uses: actions/checkout@v4
      - name: Use Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 20.x
      - run: yarn install --frozen-lockfile
      - run: yarn build
      - run: yarn test
      - run: yarn lint

View file

@ -1,92 +0,0 @@
steps:
build:
# Build and test
image: node
commands:
- yarn install --frozen-lockfile
- yarn build
when:
event: [ push, pull_request ]
branch: [ hotfix/*, feature/*, renovate/*, develop, master ]
lint:
image: node
commands:
- yarn lint
when:
event: [ push, pull_request ]
branch: [ hotfix/*, feature/*, renovate/*, develop, master ]
test:
image: node
commands:
- yarn test
when:
event: [ push, pull_request ]
branch: [ hotfix/*, feature/*, renovate/*, develop, master ]
# Package production build
package_prod:
image: node
commands:
- apt install unzip -y
- wget https://github.com/xerub/ldid/releases/download/42/ldid.zip
- unzip ldid.zip
- mkdir /opt/ldid/
- cp linux64/ldid /opt/ldid/
- export PATH=/opt/ldid:$PATH
- yarn package
when:
event: push
branch: master
rsync_prod:
image: alpine
secrets: [ ssh_key ]
commands:
- apk add rsync openssh-client
- eval `ssh-agent -s`
- echo "$SSH_KEY" | tr -d '\r' | ssh-add -
- rsync -e "ssh -o StrictHostKeyChecking=no" -r ./bin vylpes@192.168.1.115:/home/vylpes/apps/random-bunny/random-bunny_prod
when:
event: push
branch: master
# Package stage build
package_stage:
image: node
commands:
- apt install unzip -y
- wget https://github.com/xerub/ldid/releases/download/42/ldid.zip
- unzip ldid.zip
- mkdir /opt/ldid/
- cp linux64/ldid /opt/ldid/
- export PATH=/opt/ldid:$PATH
- yarn package
when:
event: push
branch: develop
rsync_stage:
image: alpine
secrets: [ ssh_key ]
commands:
- apk add rsync openssh-client
- eval `ssh-agent -s`
- echo "$SSH_KEY" | tr -d '\r' | ssh-add -
- rsync -e "ssh -o StrictHostKeyChecking=no" -r ./bin vylpes@192.168.1.115:/home/vylpes/apps/random-bunny/random-bunny_stage
when:
event: push
branch: develop
# Publish to NPM
publish:
image: plugins/npm
settings:
username:
from_secret: NPM_USERNAME
email:
from_secret: NPM_EMAIL
password:
from_secret: NPM_PASSWORD
token:
from_secret: NPM_TOKEN
when:
event: push
branch: master

View file

@@ -4,7 +4,7 @@ Since Version 2.2, Random Bunny contains a command line interface (CLI).
 ## Downloads
-The project can be downloaded as a binary for your system via the [GitHub Releases](https://github.com/Vylpes/random-bunny/releases) or [Gitea Releases](https://gitea.vylpes.xyz/RabbitLabs/random-bunny/releases) page.
+The project can be downloaded as a binary for your system via the [GitHub Releases](https://github.com/Vylpes/random-bunny/releases) or [Forgejo Releases](https://git.vylpes.xyz/RabbitLabs/random-bunny/releases) page.
 We currently support:
 - Linux (x64)
@@ -13,6 +13,8 @@ We currently support:
 The git repository can also be cloned and ran via `yarn build` and `yarn start`.
+You can produce the binary using the `yarn package` command. This creates the binaries in the `./bin` folder.
 > **NOTE:** We are aware of a bug in the macOS Arm64 builds failing to execute. For now you're still able to use the x64 builds under Rosetta fine. This will hopefully be fixed in a future release.
 ## Default Output
@@ -20,9 +22,10 @@ The git repository can also be cloned and ran via `yarn build` and `yarn start`.
 By default, the command will fetch a random image from `r/rabbits` and return it in a human-readable output.
 ```
-$ randombunny
+$ random-bunny
 Archived = false
+Author = Rabbit_Owner
 Downvotes = 0
 Hidden = false
 Permalink = /r/Rabbits/comments/1av1rg9/cute_baby_bun/
@@ -38,11 +41,11 @@ Url = https://i.redd.it/sfz0srdrimjc1.png
 The command also includes a help option in case you are stuck.
 ```
-$ randombunny --help
+$ random-bunny --help
 # or
-$ randombunny -h
+$ random-bunny -h
 Usage: random-bunny [options]
@@ -53,9 +56,10 @@ Options:
   -s, --subreddit <subreddit>  The subreddit to search (default: "rabbits")
   -j, --json                   Output as JSON
   -q, --query-metadata         Include query metadata in result
+  -o <file>                    Output to file
   --sort <sort>                Sort by (choices: "hot", "new", "top", default: "hot")
+  --limit <limit>              The amount of posts to fetch from the reddit api (default: 100)
   -h, --help                   display help for command
-✨ Done in 0.32s.
 ```
 ## JSON output
@@ -63,13 +67,13 @@ Options:
 You can also convert the output into JSON, if you need to input it to another program.
 ```bash
-$ randombunny --json
+$ random-bunny --json
 # or
-$ randombunny -j
+$ random-bunny -j
-{"Archived":false,"Downs":0,"Hidden":false,"Permalink":"/r/Rabbits/comments/1av1rg9/cute_baby_bun/","Subreddit":"Rabbits","SubredditSubscribers":486085,"Title":"Cute baby bun","Ups":210,"Url":"https://i.redd.it/sfz0srdrimjc1.png"}
+{"Archived":false,"Author":"Rabbit_Owner","Downs":0,"Hidden":false,"Permalink":"/r/Rabbits/comments/1av1rg9/cute_baby_bun/","Subreddit":"Rabbits","SubredditSubscribers":486085,"Title":"Cute baby bun","Ups":210,"Url":"https://i.redd.it/sfz0srdrimjc1.png"}
 ```
 ## Sort
@@ -79,9 +83,9 @@ You can also choose the sorting option which reddit will use to return the avail
 This defaults to "hot". The valid options are "hot", "new", and "top".
 ```
-$ randombunny --sort hot
-$ randombunny --sort new
-$ randomBunny --sort top
+$ random-bunny --sort hot
+$ random-bunny --sort new
+$ random-bunny --sort top
 ```
@@ -92,6 +96,28 @@ You can change the subreddit which the command fetches from.
 This defaults to "rabbits"
 ```
-$ randombunny --subreddit rabbits
-$ randombunny -s horses
+$ random-bunny --subreddit rabbits
+$ random-bunny -s horses
 ```
+## Output to file
+If you'd rather send the output to a file, you can supply the `-o` flag.
+```
+$ random-bunny -o ~/Desktop/output.txt
+```
+## Reddit API Return Limits
+You can also limit the number of posts the script requests from the Reddit API using the `--limit` option.
+This defaults to 100. It accepts any number between 1 and 100.
+Please note that limiting the calls to fewer than 100 gives a higher chance of the script not finding any valid image post to return.
+```
+$ random-bunny --limit 50
+```

eslint.config.mjs Normal file
View file

@ -0,0 +1,54 @@
import js from "@eslint/js";
import ts from "typescript-eslint";
export default [
{
ignores: [
"**/dist/",
"eslint.config.mjs",
"jest.config.cjs",
"jest.setup.js"
],
},
js.configs.recommended,
...ts.configs.recommended,
{
languageOptions: {
globals: {
exports: "writable",
module: "writable",
require: "writable",
process: "writable",
console: "writable",
jest: "writable",
},
ecmaVersion: 6,
sourceType: "script",
},
files: [
"./src",
"./tests"
],
rules: {
camelcase: "error",
"brace-style": ["error", "1tbs"],
"comma-dangle": ["error", "never"],
"comma-spacing": ["error", {
before: false,
after: true,
}],
"comma-style": ["error", "last"],
"arrow-body-style": ["error", "as-needed"],
"arrow-parens": ["error", "as-needed"],
"arrow-spacing": "error",
"no-var": "error",
"prefer-template": "error",
"prefer-const": "error",
},
}
];

View file

@@ -39,19 +39,22 @@
     "homepage": "https://gitea.vylpes.xyz/RabbitLabs/random-bunny",
     "funding": "https://ko-fi.com/vylpes",
     "devDependencies": {
-        "@types/eslint": "^8.21.1",
+        "@eslint/eslintrc": "^3.1.0",
+        "@eslint/js": "^9.8.0",
+        "@types/eslint": "^9.6.0",
         "@types/jest": "^29.5.8",
         "@types/node": "^20.0.0",
-        "@typescript-eslint/eslint-plugin": "^7.0.0",
-        "@typescript-eslint/parser": "^5.54.0",
-        "eslint": "^8.49.0",
+        "@typescript-eslint/eslint-plugin": "^7.18.0",
+        "@typescript-eslint/parser": "^7.18.0",
+        "@yao-pkg/pkg": "^5.12.0",
+        "eslint": "^9.8.0",
         "jest": "^29.7.0",
         "jest-mock-extended": "^3.0.3",
-        "np": "^9.0.0",
-        "pkg": "^5.8.1",
+        "np": "^10.0.0",
         "ts-jest": "^29.1.1",
         "ts-mockito": "^2.6.1",
-        "typescript": "^5.0.0"
+        "typescript": "^5.0.0",
+        "typescript-eslint": "^7.18.0"
     },
     "resolutions": {
         "np/**/got": "^14.0.0",

View file

@@ -33,10 +33,11 @@ console.log(result);
 ### `randomBunny()`
-Returns a `json string` for a random post. Accepts 2 arguments: `subreddit`, and `sortby` ('new', 'hot', 'top')
+Returns a `json string` for a random post. Accepts 3 arguments: `subreddit`, `sortby` ('new', 'hot', 'top'), and `limit` (1-100, default 100)
 The json string which gets returned consists of:
 - archived
+- author
 - downs
 - hidden
 - permalink
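A minimal usage sketch of the updated signature follows (not part of the original README; it assumes the package is imported by its npm name `random-bunny`, and note that the field list above is truncated by the diff context):

```typescript
import randomBunny from "random-bunny";

async function example(): Promise<void> {
    // Ask Reddit for up to 50 posts from r/rabbits, sorted by "new", and pick one at random.
    const response = await randomBunny("rabbits", "new", 50);

    if (response.IsSuccess) {
        // The result now carries the new Author field alongside Title, Url, etc.
        console.log(`${response.Result!.Title} by ${response.Result!.Author}`);
        console.log(response.Result!.Url);
    } else {
        console.error(response.Error!.Message, response.Error!.Code);
    }
}

example();
```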

View file

@@ -1,4 +1,5 @@
 {
     "$schema": "https://docs.renovatebot.com/renovate-schema.json",
-    "baseBranches": ["develop"]
+    "baseBranches": ["develop"],
+    "labels": ["type/dependencies"]
 }

View file

@ -2,55 +2,24 @@ import { Command, Option } from "commander";
import randomBunny from "./index"; import randomBunny from "./index";
import ICliOptions from "./contracts/ICliOptions"; import ICliOptions from "./contracts/ICliOptions";
import { exit } from "process"; import { exit } from "process";
import CliHelper from "./helpers/cliHelper";
const program = new Command(); const program = new Command();
program program
.name('random-bunny') .name('random-bunny')
.description('Get a random image url from a subreddit of your choosing') .description('Get a random image url from a subreddit of your choosing')
.version('2.2') .version('2.3')
.option('-s, --subreddit <subreddit>', 'The subreddit to search', 'rabbits') .option('-s, --subreddit <subreddit>', 'The subreddit to search', 'rabbits')
.option('-j, --json', 'Output as JSON') .option('-j, --json', 'Output as JSON')
.option('-q, --query-metadata', 'Include query metadata in result') .option('-q, --query-metadata', 'Include query metadata in result')
.addOption(new Option('--sort <sort>', 'Sort by').default('hot').choices(['hot', 'new', 'top'])); .option('-o <file>', 'Output to file')
.addOption(new Option('--sort <sort>', 'Sort by').default('hot').choices(['hot', 'new', 'top']))
.addOption(new Option('--limit <limit>', 'The amount of posts to fetch from the reddit api').default(100));
program.parse(); program.parse();
const options: ICliOptions = program.opts(); const options: ICliOptions = program.opts();
randomBunny(options.subreddit, options.sort) randomBunny(options.subreddit, options.sort, options.limit)
.then((response) => { .then((response) => exit(CliHelper.Endpoint(response, options)));
if (response.IsSuccess) {
const result = response.Result!;
const outputLines: string[] = [];
if (options.json) {
console.log(JSON.stringify(result));
return;
}
outputLines.push(`Archived = ${result.Archived}`);
outputLines.push(`Downvotes = ${result.Downs}`);
outputLines.push(`Hidden = ${result.Hidden}`);
outputLines.push(`Permalink = ${result.Permalink}`);
outputLines.push(`Subreddit = ${result.Subreddit}`);
outputLines.push(`Subreddit Subscribers = ${result.SubredditSubscribers}`);
outputLines.push(`Title = ${result.Title}`);
outputLines.push(`Upvotes = ${result.Ups}`);
outputLines.push(`Url = ${result.Url}`);
if (options.queryMetadata != null) {
outputLines.push(`Query.Subreddit = ${response.Query.subreddit}`);
outputLines.push(`Query.Sort By = ${response.Query.sortBy}`);
}
console.log(outputLines.join("\n"));
exit(0);
} else {
const error = response.Error!;
console.error(error.Message, error.Code);
exit(1);
}
});

View file

@@ -3,4 +3,5 @@ export enum ErrorCode {
     FailedToFetchReddit,
     UnableToParseJSON,
     NoImageResultsFound,
+    LimitOutOfRange,
 }

View file

@@ -2,4 +2,5 @@ export default class ErrorMessages {
     public static readonly FailedToFetchReddit = "Failed to fetch result from Reddit";
     public static readonly UnableToParseJSON = "Unable to parse the JSON result";
     public static readonly NoImageResultsFound = "No image results found in response from Reddit";
+    public static readonly LimitOutOfRange = "Limit must be a number between 1 and 100";
 }

View file

@@ -1,6 +1,8 @@
 export default interface ICliOptions {
     subreddit: string,
     json?: boolean,
-    sort: string,
+    sort: "new" | "hot" | "top",
+    o?: string,
+    limit: number,
     queryMetadata?: boolean,
 }

View file

@@ -1,6 +1,7 @@
 export default interface IFetchResult {
     data: {
         archived: boolean,
+        author: string,
         downs: number,
         hidden: boolean,
         permalink: string,

View file

@@ -1,5 +1,6 @@
 export default interface IRedditResult {
     Archived: boolean,
+    Author: string,
     Downs: number,
     Hidden: boolean,
     Permalink: string,

View file

@@ -1,4 +1,5 @@
 export default interface QueryResult {
     subreddit: string,
     sortBy: string,
+    limit: number,
 }

src/helpers/cliHelper.ts Normal file
View file

@ -0,0 +1,25 @@
import { writeFileSync } from "fs";
import ICliOptions from "../contracts/ICliOptions";
import IReturnResult from "../contracts/IReturnResult";
import OutputHelper from "./outputHelper";
export default class CliHelper {
public static Endpoint(response: IReturnResult, options: ICliOptions): number {
if (response.IsSuccess) {
const output = OutputHelper.GenerateOutput(response, options);
if (options.o) {
writeFileSync(options.o, `${output}\n`);
} else {
console.log(output);
}
return 0;
} else {
const error = response.Error!;
console.error(error.Message, error.Code);
return 1;
}
}
}

View file

@ -0,0 +1,33 @@
import ICliOptions from "../contracts/ICliOptions";
import IReturnResult from "../contracts/IReturnResult";
export default class OutputHelper {
public static GenerateOutput(response: IReturnResult, options: ICliOptions): string {
const result = response.Result!;
const outputLines: string[] = [];
if (options.json) {
return JSON.stringify(result);
}
outputLines.push(`Archived = ${result.Archived}`);
outputLines.push(`Author = ${result.Author}`);
outputLines.push(`Downvotes = ${result.Downs}`);
outputLines.push(`Hidden = ${result.Hidden}`);
outputLines.push(`Permalink = ${result.Permalink}`);
outputLines.push(`Subreddit = ${result.Subreddit}`);
outputLines.push(`Subreddit Subscribers = ${result.SubredditSubscribers}`);
outputLines.push(`Title = ${result.Title}`);
outputLines.push(`Upvotes = ${result.Ups}`);
outputLines.push(`Url = ${result.Url}`);
if (options.queryMetadata != null) {
outputLines.push(`Query.Subreddit = ${response.Query.subreddit}`);
outputLines.push(`Query.Sort By = ${response.Query.sortBy}`);
outputLines.push(`Query.Limit = ${response.Query.limit}`);
}
return outputLines.join("\n");
}
}
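For context, a small usage sketch of `GenerateOutput` (the relative import paths assume the snippet sits alongside the helper under `src/helpers/`; the post values are placeholders):

```typescript
import ICliOptions from "../contracts/ICliOptions";
import IReturnResult from "../contracts/IReturnResult";
import OutputHelper from "./outputHelper";

const response = {
    IsSuccess: true,
    Query: { subreddit: "rabbits", sortBy: "hot", limit: 100 },
    Result: {
        Archived: false,
        Author: "author",
        Downs: 0,
        Hidden: false,
        Permalink: "/r/Rabbits/comments/example/",
        Subreddit: "Rabbits",
        SubredditSubscribers: 1,
        Title: "Example post",
        Ups: 1,
        Url: "https://i.redd.it/example.png",
    },
} as IReturnResult;

// Human-readable `Key = Value` lines by default...
console.log(OutputHelper.GenerateOutput(response, {} as ICliOptions));

// ...or a single JSON string when the --json flag is set.
console.log(OutputHelper.GenerateOutput(response, { json: true } as ICliOptions));
```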

View file

@@ -5,18 +5,25 @@ import { List } from 'linqts';
 import IFetchResult from "./contracts/IFetchResult";
 import { ErrorCode } from "./constants/ErrorCode";
 import ErrorMessages from "./constants/ErrorMessages";
-import ImageHelper from "./imageHelper";
+import ImageHelper from "./helpers/imageHelper";
 
-const sortable = [
-    'new',
-    'hot',
-    'top'
-];
-
-export default async function randomBunny(subreddit: string, sortBy: string = 'hot'): Promise<IReturnResult> {
-    if (!sortable.includes(sortBy)) sortBy = 'hot';
-
-    const result = await fetch(`https://reddit.com/r/${subreddit}/${sortBy}.json?limit=100`)
+export default async function randomBunny(subreddit: string, sortBy: "new" | "hot" | "top" = 'hot', limit: number = 100): Promise<IReturnResult> {
+    if (limit < 1 || limit > 100) {
+        return {
+            IsSuccess: false,
+            Query: {
+                subreddit: subreddit,
+                sortBy: sortBy,
+                limit: limit,
+            },
+            Error: {
+                Code: ErrorCode.LimitOutOfRange,
+                Message: ErrorMessages.LimitOutOfRange,
+            }
+        };
+    }
+
+    const result = await fetch(`https://reddit.com/r/${subreddit}/${sortBy}.json?limit=${limit}`)
         .then((res) => {
             return res;
         })
@@ -30,6 +37,7 @@ export default async function randomBunny(subreddit: string, sortBy: string = 'h
             Query: {
                 subreddit: subreddit,
                 sortBy: sortBy,
+                limit: limit,
             },
             Error: {
                 Code: ErrorCode.FailedToFetchReddit,
@@ -46,6 +54,7 @@ export default async function randomBunny(subreddit: string, sortBy: string = 'h
             Query: {
                 subreddit: subreddit,
                 sortBy: sortBy,
+                limit: limit,
             },
             Error: {
                 Code: ErrorCode.UnableToParseJSON,
@@ -68,6 +77,7 @@ export default async function randomBunny(subreddit: string, sortBy: string = 'h
             Query: {
                 subreddit: subreddit,
                 sortBy: sortBy,
+                limit: limit,
             },
             Error: {
                 Code: ErrorCode.NoImageResultsFound,
@@ -93,6 +103,7 @@ export default async function randomBunny(subreddit: string, sortBy: string = 'h
             Query: {
                 subreddit: subreddit,
                 sortBy: sortBy,
+                limit: limit,
             },
             Error: {
                 Code: ErrorCode.NoImageResultsFound,
@@ -107,6 +118,7 @@ export default async function randomBunny(subreddit: string, sortBy: string = 'h
     }
 
     const redditResult: IRedditResult = {
+        Author: randomData['author'],
         Archived: randomData['archived'],
         Downs: randomData['downs'],
         Hidden: randomData['hidden'],
@@ -123,6 +135,7 @@ export default async function randomBunny(subreddit: string, sortBy: string = 'h
         Query: {
             subreddit: subreddit,
             sortBy: sortBy,
+            limit: limit,
         },
         Result: redditResult
     };
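For illustration, a hedged sketch of how the new `limit` guard behaves from a caller's point of view (the npm package name `random-bunny` is assumed for the import):

```typescript
import randomBunny from "random-bunny";

async function demo(): Promise<void> {
    // An out-of-range limit fails fast: the guard returns a failure result
    // with ErrorCode.LimitOutOfRange without ever calling the Reddit API.
    const tooMany = await randomBunny("rabbits", "hot", 101);
    console.log(tooMany.IsSuccess);      // false
    console.log(tooMany.Error!.Message); // "Limit must be a number between 1 and 100"

    // An in-range limit is passed straight through to the query string,
    // e.g. https://reddit.com/r/rabbits/hot.json?limit=25
    const ok = await randomBunny("rabbits", "hot", 25);
    console.log(ok.Query.limit);         // 25
}

demo();
```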

View file

@ -1,187 +0,0 @@
import { exec } from "child_process";
import path from "path";
describe('default', () => {
test('GIVEN no options are supplied, EXPECT standard output', async () => {
const result = await cli([], '.');
const keys = result.stdout.split('\n')
.flatMap(x => x.split(' = ')[0])
.filter(x => x && x.length > 0);
const values = result.stdout.split('\n')
.flatMap(x => x.split(' = ')[1])
.filter(x => x && x.length > 0);
expect(result.code).toBe(0);
expect(keys).toStrictEqual(['Archived', 'Downvotes', 'Hidden', 'Permalink', 'Subreddit', 'Subreddit Subscribers', 'Title', 'Upvotes', 'Url']);
expect(values.length).toBe(9);
}, 5000);
test('GIVEN an error occurs, EXPECT error output', async () => {
const result = await cli(['-s', 'textonly'], '.');
expect(result.code).toBe(1);
expect(result.stderr).toBeDefined();
}, 5000);
});
describe('version', () => {
test('GIVEN -V flag is supplied, EXPECT version returned', async () => {
const result = await cli(['-V'], '.');
expect(result.code).toBe(0);
expect(result.stdout).toBe('2.2\n');
});
test('GIVEN --version is supplied, EXPECT version returned', async () => {
const result = await cli(['--version'], '.');
expect(result.code).toBe(0);
expect(result.stdout).toBe('2.2\n');
});
});
describe('help', () => {
test('GIVEN -h is supplied, EXPECT help returned', async () => {
const result = await cli(['-h'], '.');
expect(result.code).toBe(0);
expect(result.stdout.split('\n')[0]).toBe('Usage: random-bunny [options]');
});
test('GIVEN --help is supplied, EXPECT help returned', async () => {
const result = await cli(['--help'], '.');
expect(result.code).toBe(0);
expect(result.stdout.split('\n')[0]).toBe('Usage: random-bunny [options]');
});
});
describe('subreddit', () => {
test('GIVEN -s is not supplied, EXPECT subreddit to be defaulted', async () => {
const result = await cli([], '.');
const subreddit = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0] == 'Subreddit')!
.split(' = ')[1];
expect(subreddit).toBe('Rabbits');
}, 5000);
test('GIVEN -s is supplied, EXPECT subreddit to be changed', async () => {
const result = await cli(['-s', 'pics'], '.');
const subreddit = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0] == 'Subreddit')!
.split(' = ')[1];
expect(subreddit).toBe('pics');
}, 5000);
test('GIVEN --subreddit is supplied, EXPECT subreddit to be changed', async () => {
const result = await cli(['--subreddit', 'pics'], '.');
const subreddit = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0] == 'Subreddit')!
.split(' = ')[1];
expect(subreddit).toBe('pics');
}, 5000);
});
describe('json', () => {
test('GIVEN -j is supplied, EXPECT output to be valid JSON', async () => {
const result = await cli(['-j'], '.');
const json = JSON.parse(result.stdout);
expect(json).toBeDefined();
}, 5000);
test('GIVEN --json is supplied, EXPECT output to be valid JSON', async () => {
const result = await cli(['--json'], '.');
const json = JSON.parse(result.stdout);
expect(json).toBeDefined();
}, 5000);
});
describe('sort', () => {
test('GIVEN --sort is not supplied, EXPECT sort to be defaulted', async () => {
const result = await cli(['-q'], '.');
const sortBy = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0] == 'Query.Sort By')!
.split(' = ')[1];
expect(sortBy).toBe('hot');
}, 5000);
test('GIVEN --sort is supplied WITH a valid input, EXPECT sort to be used', async () => {
const result = await cli(['-q', '--sort', 'new'], '.');
const sortBy = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0] == 'Query.Sort By')!
.split(' = ')[1];
expect(sortBy).toBe('new');
}, 5000);
test('GIVEN --sort is supplied WITH an invalid input, EXPECT error', async () => {
const result = await cli(['-q', '--sort', 'invalid'], '.');
expect(result.code).toBe(1);
expect(result.stderr).toBe("error: option '--sort <sort>' argument 'invalid' is invalid. Allowed choices are hot, new, top.\n");
}, 5000);
});
describe('query-metadata', () => {
test('GIVEN --query-metadata is not supplied, EXPECT no query metadata returned', async () => {
const result = await cli([], '.');
const query = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0].startsWith('Query'));
expect(query).toBeUndefined();
}, 5000);
test('GIVEN --query-metadata is supplied, EXPECT query metadata returned', async () => {
const result = await cli(['--query-metadata'], '.');
const query = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0].startsWith('Query'));
expect(query).toBeDefined();
}, 5000);
test('GIVEN -q is supplied, EXPECT query metadata returned', async () => {
const result = await cli(['-q'], '.');
const query = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0].startsWith('Query'));
expect(query).toBeDefined();
}, 5000);
});
function cli(args: string[], cwd: string): Promise<cliResult> {
return new Promise(resolve => {
exec(`node ${path.resolve('./dist/cli.js')} ${args.join(' ')}`,
{ cwd },
(error, stdout, stderr) => { resolve({
code: error && error.code ? error.code : 0,
error,
stdout,
stderr });
});
});
}
interface cliResult {
code: number,
error: any,
stdout: string,
stderr: string,
}

View file

@ -0,0 +1,32 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`GenerateOutput EXPECT standout output to be returned 1`] = `
"Archived = false
Author = author
Downvotes = 0
Hidden = false
Permalink = /r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/
Subreddit = Rabbits
Subreddit Subscribers = 654751
Title = This is my Ms Bear!
Upvotes = 17
Url = https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d"
`;
exports[`GenerateOutput GIVEN options.json is true, EXPECT output to be returned as JSON 1`] = `"{"Archived":false,"Author":"author","Downs":0,"Hidden":false,"Permalink":"/r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/","Subreddit":"Rabbits","SubredditSubscribers":654751,"Title":"This is my Ms Bear!","Ups":17,"Url":"https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d"}"`;
exports[`GenerateOutput GIVEN options.queryMetadata is supplied, EXPECT query metadata to be added 1`] = `
"Archived = false
Author = author
Downvotes = 0
Hidden = false
Permalink = /r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/
Subreddit = Rabbits
Subreddit Subscribers = 654751
Title = This is my Ms Bear!
Upvotes = 17
Url = https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d
Query.Subreddit = rabbits
Query.Sort By = hot
Query.Limit = 100"
`;

View file

@ -0,0 +1,118 @@
import fs from "fs";
import CliHelper from "../../src/helpers/cliHelper";
import ICliOptions from "../../src/contracts/ICliOptions";
import IReturnResult from "../../src/contracts/IReturnResult";
import OutputHelper from "../../src/helpers/outputHelper";
import { ErrorCode } from "../../src/constants/ErrorCode";
describe("Endpoint", () => {
describe("GIVEN response is successful", () => {
test("GIVEN options.o is defined, EXPECT output written to file", () => {
// Arrange
const response = {
IsSuccess: true,
} as IReturnResult;
const options = {
o: "file.txt",
} as ICliOptions;
OutputHelper.GenerateOutput = jest.fn().mockReturnValue("test output");
fs.writeFileSync = jest.fn();
console.log = jest.fn();
console.error = jest.fn();
// Act
const result = CliHelper.Endpoint(response, options);
// Assert
expect(result).toBe(0);
expect(OutputHelper.GenerateOutput).toHaveBeenCalledTimes(1);
expect(OutputHelper.GenerateOutput).toHaveBeenCalledWith(response, options);
expect(fs.writeFileSync).toHaveBeenCalledTimes(1);
expect(fs.writeFileSync).toHaveBeenCalledWith("file.txt", "test output\n");
expect(console.log).not.toHaveBeenCalled();
expect(console.error).not.toHaveBeenCalled();
});
test("GIVEN options.o is undefined, EXPECT output logged to console", () => {
// Arrange
const response = {
IsSuccess: true,
} as IReturnResult;
const options = {
o: undefined,
} as ICliOptions;
OutputHelper.GenerateOutput = jest.fn().mockReturnValue("test output");
fs.writeFileSync = jest.fn();
console.log = jest.fn();
console.error = jest.fn();
// Act
const result = CliHelper.Endpoint(response, options);
// Assert
expect(result).toBe(0);
expect(OutputHelper.GenerateOutput).toHaveBeenCalledTimes(1);
expect(OutputHelper.GenerateOutput).toHaveBeenCalledWith(response, options);
expect(fs.writeFileSync).not.toHaveBeenCalled();
expect(console.log).toHaveBeenCalledTimes(1);
expect(console.log).toHaveBeenCalledWith("test output");
expect(console.error).not.toHaveBeenCalled();
});
});
test("GIVEN response is failure, EXPECT error logged to console", () => {
// Arrange
const response = {
IsSuccess: false,
Error: {
Message: "error message",
Code: ErrorCode.FailedToFetchReddit,
},
} as IReturnResult;
const options = {
o: "file.txt",
} as ICliOptions;
OutputHelper.GenerateOutput = jest.fn().mockReturnValue("test output");
fs.writeFileSync = jest.fn();
console.log = jest.fn();
console.error = jest.fn();
// Act
const result = CliHelper.Endpoint(response, options);
// Assert
expect(result).toBe(1);
expect(OutputHelper.GenerateOutput).not.toHaveBeenCalled();
expect(fs.writeFileSync).not.toHaveBeenCalled();
expect(console.log).not.toHaveBeenCalled();
expect(console.error).toHaveBeenCalledTimes(1);
expect(console.error).toHaveBeenCalledWith("error message", ErrorCode.FailedToFetchReddit);
});
});

View file

@@ -1,4 +1,4 @@
-import ImageHelper from "../src/imageHelper";
+import ImageHelper from "../../src/helpers/imageHelper";
 import fetch from "got-cjs";
 
 jest.mock('got-cjs');

View file

@ -0,0 +1,105 @@
import ICliOptions from "../../src/contracts/ICliOptions";
import IReturnResult from "../../src/contracts/IReturnResult";
import OutputHelper from "../../src/helpers/outputHelper";
describe("GenerateOutput", () => {
test("EXPECT standout output to be returned", () => {
// Arrange
const response = {
IsSuccess: true,
Query: {
subreddit: "rabbits",
sortBy: "hot",
limit: 100,
},
Result: {
Archived: false,
Author: 'author',
Downs: 0,
Hidden: false,
Permalink: "/r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/",
Subreddit: "Rabbits",
SubredditSubscribers: 654751,
Title: "This is my Ms Bear!",
Ups: 17,
Url: "https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d",
},
} as IReturnResult;
const options = {} as ICliOptions;
// Act
const result = OutputHelper.GenerateOutput(response, options);
// Assert
expect(result).toMatchSnapshot();
});
test("GIVEN options.json is true, EXPECT output to be returned as JSON", () => {
// Arrange
const response = {
IsSuccess: true,
Query: {
subreddit: "rabbits",
sortBy: "hot",
limit: 100,
},
Result: {
Archived: false,
Author: 'author',
Downs: 0,
Hidden: false,
Permalink: "/r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/",
Subreddit: "Rabbits",
SubredditSubscribers: 654751,
Title: "This is my Ms Bear!",
Ups: 17,
Url: "https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d",
},
} as IReturnResult;
const options = {
json: true,
} as ICliOptions;
// Act
const result = OutputHelper.GenerateOutput(response, options);
// Assert
expect(result).toMatchSnapshot();
});
test("GIVEN options.queryMetadata is supplied, EXPECT query metadata to be added", () => {
// Arrange
const response = {
IsSuccess: true,
Query: {
subreddit: "rabbits",
sortBy: "hot",
limit: 100,
},
Result: {
Archived: false,
Author: 'author',
Downs: 0,
Hidden: false,
Permalink: "/r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/",
Subreddit: "Rabbits",
SubredditSubscribers: 654751,
Title: "This is my Ms Bear!",
Ups: 17,
Url: "https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d",
},
} as IReturnResult;
const options = {
queryMetadata: true,
} as ICliOptions;
// Act
const result = OutputHelper.GenerateOutput(response, options);
// Assert
expect(result).toMatchSnapshot();
});
});

View file

@@ -1,12 +1,16 @@
 import { ErrorCode } from "../src/constants/ErrorCode";
 import ErrorMessages from "../src/constants/ErrorMessages";
-import ImageHelper from "../src/imageHelper";
+import ImageHelper from "../src/helpers/imageHelper";
 import randomBunny from "../src/index";
 import fetch from "got-cjs";
 
 jest.mock('got-cjs');
 const fetchMock = jest.mocked(fetch);
 
+beforeEach(() => {
+    fetchMock.mockReset();
+});
+
 describe('randomBunny', () => {
     test('GIVEN subreddit AND sortBy is supplied, EXPECT successful result', async() => {
         fetchMock.mockResolvedValue({
@@ -16,6 +20,7 @@ describe('randomBunny', () => {
                         {
                             data: {
                                 archived: false,
+                                author: 'author',
                                 downs: 0,
                                 hidden: false,
                                 permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
@@ -37,7 +42,7 @@ describe('randomBunny', () => {
         expect(result.Result).toBeDefined();
         expect(result.Error).toBeUndefined();
 
-        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
+        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
     });
 
     test('GIVEN sortBy is NOT supplied, expect it to default to hot', async () => {
@@ -48,6 +53,7 @@ describe('randomBunny', () => {
                        {
                             data: {
                                 archived: false,
+                                author: 'author',
                                 downs: 0,
                                 hidden: false,
                                 permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
@@ -69,39 +75,7 @@ describe('randomBunny', () => {
         expect(result.Result).toBeDefined();
         expect(result.Error).toBeUndefined();
 
-        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/hot.json?limit=100');
+        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/hot.json?limit=100');
     });
 
-    test('GIVEN sortBy is NOT valid, expect it to default to hot', async () => {
-        fetchMock.mockResolvedValue({
-            body: JSON.stringify({
-                data: {
-                    children: [
-                        {
-                            data: {
-                                archived: false,
-                                downs: 0,
-                                hidden: false,
-                                permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
-                                subreddit: 'Rabbits',
-                                subreddit_subscribers: 298713,
-                                title: 'Someone told pickles its Monday… *internal fury*',
-                                ups: 1208,
-                                url: 'https://i.redd.it/cr8xudsnkgua1.jpg',
-                            },
-                        },
-                    ],
-                }
-            }),
-        });
-
-        const result = await randomBunny('rabbits', 'invalid');
-
-        expect(result.IsSuccess).toBeTruthy();
-        expect(result.Result).toBeDefined();
-        expect(result.Error).toBeUndefined();
-
-        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/hot.json?limit=100');
-    });
-
     test('GIVEN the fetch fails, EXPECT failure result', async () => {
@@ -116,7 +90,7 @@ describe('randomBunny', () => {
         expect(result.Error!.Code).toBe(ErrorCode.FailedToFetchReddit);
         expect(result.Error!.Message).toBe(ErrorMessages.FailedToFetchReddit);
 
-        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
+        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
     });
 
     test('GIVEN the result is NOT valid JSON, EXPECT failure result', async () => {
@@ -133,7 +107,7 @@ describe('randomBunny', () => {
         expect(result.Error!.Code).toBe(ErrorCode.UnableToParseJSON);
         expect(result.Error!.Message).toBe(ErrorMessages.UnableToParseJSON);
 
-        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
+        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
     });
 
     test('GIVEN randomSelect does NOT find a response, EXPECT failure result', async () => {
@@ -154,7 +128,7 @@ describe('randomBunny', () => {
         expect(result.Error!.Code).toBe(ErrorCode.NoImageResultsFound);
         expect(result.Error!.Message).toBe(ErrorMessages.NoImageResultsFound);
 
-        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
+        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
     });
 
     test('GIVEN randomSelect does NOT find a valid response, EXPECT failure result', async () => {
@@ -165,6 +139,7 @@ describe('randomBunny', () => {
                         {
                             data: {
                                 archived: false,
+                                author: 'author',
                                 downs: 0,
                                 hidden: false,
                                 permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
@@ -189,8 +164,8 @@ describe('randomBunny', () => {
         expect(result.Error!.Code).toBe(ErrorCode.NoImageResultsFound);
         expect(result.Error!.Message).toBe(ErrorMessages.NoImageResultsFound);
 
-        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
-        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
+        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
+        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
     });
 
     test("GIVEN data fetched is a gallery AND an image is returned from the helper, EXPECT this to be used", async () => {
@@ -201,6 +176,7 @@ describe('randomBunny', () => {
                         {
                             data: {
                                 archived: false,
+                                author: 'author',
                                 downs: 0,
                                 hidden: false,
                                 permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
@@ -223,7 +199,7 @@ describe('randomBunny', () => {
         expect(result.IsSuccess).toBeTruthy();
         expect(result.Result).toBeDefined();
 
-        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
+        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
 
         expect(ImageHelper.FetchImageFromRedditGallery).toHaveBeenCalledTimes(1);
         expect(ImageHelper.FetchImageFromRedditGallery).toHaveBeenCalledWith("https://i.redd.it/gallery/cr8xudsnkgua1");
@@ -237,6 +213,7 @@ describe('randomBunny', () => {
                         {
                             data: {
                                 archived: false,
+                                author: 'author',
                                 downs: 0,
                                 hidden: false,
                                 permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
@@ -263,4 +240,109 @@ describe('randomBunny', () => {
         expect(result.Error?.Code).toBe(ErrorCode.NoImageResultsFound);
         expect(result.Error?.Message).toBe(ErrorMessages.NoImageResultsFound);
     });
test("GIVEN limit is supplied, EXPECT limit sent to the API", async () => {
fetchMock.mockResolvedValue({
body: JSON.stringify({
data: {
children: [
{
data: {
archived: false,
author: 'author',
downs: 0,
hidden: false,
permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
subreddit: 'Rabbits',
subreddit_subscribers: 298713,
title: 'Someone told pickles its Monday… *internal fury*',
ups: 1208,
url: 'https://i.redd.it/cr8xudsnkgua1.jpg',
},
},
],
}
}),
});
const result = await randomBunny('rabbits', 'new', 50);
expect(result.IsSuccess).toBeTruthy();
expect(result.Result).toBeDefined();
expect(result.Error).toBeUndefined();
expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=50');
});
test("GIVEN limit is less than 1, EXPECT error to be returned", async () => {
fetchMock.mockResolvedValue({
body: JSON.stringify({
data: {
children: [
{
data: {
archived: false,
author: 'author',
downs: 0,
hidden: false,
permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
subreddit: 'Rabbits',
subreddit_subscribers: 298713,
title: 'Someone told pickles its Monday… *internal fury*',
ups: 1208,
url: 'https://i.redd.it/cr8xudsnkgua1.jpg',
},
},
],
}
}),
});
const result = await randomBunny('rabbits', 'new', 0);
expect(result.IsSuccess).toBeFalsy();
expect(result.Result).toBeUndefined();
expect(result.Error).toBeDefined();
expect(result.Error!.Code).toBe(ErrorCode.LimitOutOfRange);
expect(result.Error!.Message).toBe(ErrorMessages.LimitOutOfRange);
expect(fetchMock).not.toHaveBeenCalled();
});
test("GIVEN limit is greater than 100, EXPECT error to be returned", async () => {
fetchMock.mockResolvedValue({
body: JSON.stringify({
data: {
children: [
{
data: {
archived: false,
author: 'author',
downs: 0,
hidden: false,
permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
subreddit: 'Rabbits',
subreddit_subscribers: 298713,
title: 'Someone told pickles its Monday… *internal fury*',
ups: 1208,
url: 'https://i.redd.it/cr8xudsnkgua1.jpg',
},
},
],
}
}),
});
const result = await randomBunny('rabbits', 'new', 101);
expect(result.IsSuccess).toBeFalsy();
expect(result.Result).toBeUndefined();
expect(result.Error).toBeDefined();
expect(result.Error!.Code).toBe(ErrorCode.LimitOutOfRange);
expect(result.Error!.Message).toBe(ErrorMessages.LimitOutOfRange);
expect(fetchMock).not.toHaveBeenCalled();
});
}); });

yarn.lock

File diff suppressed because it is too large