Compare commits

045ce9518636e75c84e063cf63317622cfb4b82d..41195de16ab6588bacf44da506aa59ddf7c576f1

No commits in common. "045ce9518636e75c84e063cf63317622cfb4b82d" and "41195de16ab6588bacf44da506aa59ddf7c576f1" have entirely different histories.

29 changed files with 1289 additions and 1237 deletions

1
.eslintignore Normal file
View file

@@ -0,0 +1 @@
dist/

50
.eslintrc Normal file
View file

@@ -0,0 +1,50 @@
{
"parserOptions": {
"ecmaVersion": 6
},
"extends": [
"eslint:recommended"
],
"rules": {
"camelcase": "error",
"brace-style": [
"error",
"1tbs"
],
"comma-dangle": [
"error",
"never"
],
"comma-spacing": [
"error",
{
"before": false,
"after": true
}
],
"comma-style": [
"error",
"last"
],
"arrow-body-style": [
"error",
"as-needed"
],
"arrow-parens": [
"error",
"as-needed"
],
"arrow-spacing": "error",
"no-var": "error",
"prefer-template": "error",
"prefer-const": "error"
},
"globals": {
"exports": "writable",
"module": "writable",
"require": "writable",
"process": "writable",
"console": "writable",
"jest": "writable"
}
}
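
For context, a small TypeScript-flavoured illustration (not part of either commit, identifiers made up for the example) of the style the stricter rules above enforce, in particular `no-var`, `prefer-const`, `prefer-template` and `comma-dangle: never`:

```typescript
// Illustrative only; not taken from the repository.
// no-var / prefer-const: use const rather than var or a reassignable let.
const subreddits: string[] = ["rabbits", "horses"];

// prefer-template: build strings with template literals rather than "+".
// arrow-body-style "as-needed": implicit return, no redundant braces.
const toUrl = (name: string): string => `https://reddit.com/r/${name}/hot.json?limit=100`;

// comma-dangle "never": no trailing comma after the last argument.
console.log(subreddits.map(toUrl).join("\n"));
```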

View file

@@ -1,38 +0,0 @@
name: Publish to NPM
on:
push:
tags:
- '*'
jobs:
build:
runs-on: node
steps:
- uses: actions/checkout@v4
- name: Use Node.js
uses: actions/setup-node@v4
with:
node-version: 20.x
- run: yarn install --frozen-lockfile
- run: yarn build
- run: yarn test
- run: yarn lint
publish:
needs: build
runs-on: node
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: 20.x
- run: yarn install --frozen-lockfile
- run: yarn build
- run: yarn package
- run: cp -r ./bin ${{ secrets.PROD_REPO_PATH }}
- uses: https://github.com/JS-DevTools/npm-publish@v3
with:
token: ${{ secrets.NPM_TOKEN }}

View file

@@ -1,35 +0,0 @@
name: Stage
on:
push:
branches:
- develop
jobs:
build:
runs-on: node
steps:
- uses: actions/checkout@v4
- name: Use Node.js
uses: actions/setup-node@v4
with:
node-version: 20.x
- run: yarn install --frozen-lockfile
- run: yarn build
- run: yarn test
- run: yarn lint
rsync:
needs: build
runs-on: node
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: 20.x
- run: yarn install --frozen-lockfile
- run: yarn build
- run: yarn package
- run: cp -r ./bin ${{ secrets.STAGE_REPO_PATH }}

View file

@@ -1,25 +0,0 @@
name: Test
on:
push:
branches:
- feature/*
- hotfix/*
- renovate/*
- dependabot/*
jobs:
build:
runs-on: node
steps:
- uses: actions/checkout@v4
- name: Use Node.js
uses: actions/setup-node@v4
with:
node-version: 20.x
- run: yarn install --frozen-lockfile
- run: yarn build
- run: yarn test
- run: yarn lint

92
.woodpecker.yml Normal file
View file

@@ -0,0 +1,92 @@
steps:
build:
# Build and test
image: node
commands:
- yarn install --frozen-lockfile
- yarn build
when:
event: [ push, pull_request ]
branch: [ hotfix/*, feature/*, renovate/*, develop, master ]
lint:
image: node
commands:
- yarn lint
when:
event: [ push, pull_request ]
branch: [ hotfix/*, feature/*, renovate/*, develop, master ]
test:
image: node
commands:
- yarn test
when:
event: [ push, pull_request ]
branch: [ hotfix/*, feature/*, renovate/*, develop, master ]
# Package production build
package_prod:
image: node
commands:
- apt install unzip -y
- wget https://github.com/xerub/ldid/releases/download/42/ldid.zip
- unzip ldid.zip
- mkdir /opt/ldid/
- cp linux64/ldid /opt/ldid/
- export PATH=/opt/ldid:$PATH
- yarn package
when:
event: push
branch: master
rsync_prod:
image: alpine
secrets: [ ssh_key ]
commands:
- apk add rsync openssh-client
- eval `ssh-agent -s`
- echo "$SSH_KEY" | tr -d '\r' | ssh-add -
- rsync -e "ssh -o StrictHostKeyChecking=no" -r ./bin vylpes@192.168.1.115:/home/vylpes/apps/random-bunny/random-bunny_prod
when:
event: push
branch: master
# Package stage build
package_stage:
image: node
commands:
- apt install unzip -y
- wget https://github.com/xerub/ldid/releases/download/42/ldid.zip
- unzip ldid.zip
- mkdir /opt/ldid/
- cp linux64/ldid /opt/ldid/
- export PATH=/opt/ldid:$PATH
- yarn package
when:
event: push
branch: develop
rsync_stage:
image: alpine
secrets: [ ssh_key ]
commands:
- apk add rsync openssh-client
- eval `ssh-agent -s`
- echo "$SSH_KEY" | tr -d '\r' | ssh-add -
- rsync -e "ssh -o StrictHostKeyChecking=no" -r ./bin vylpes@192.168.1.115:/home/vylpes/apps/random-bunny/random-bunny_stage
when:
event: push
branch: develop
# Publish to NPM
publish:
image: plugins/npm
settings:
username:
from_secret: NPM_USERNAME
email:
from_secret: NPM_EMAIL
password:
from_secret: NPM_PASSWORD
token:
from_secret: NPM_TOKEN
when:
event: push
branch: master

View file

@@ -4,7 +4,7 @@ Since Version 2.2, Random Bunny contains a command line interface (CLI).
 ## Downloads
 
-The project can be downloaded as a binary for your system via the [GitHub Releases](https://github.com/Vylpes/random-bunny/releases) or [Forgejo Releases](https://git.vylpes.xyz/RabbitLabs/random-bunny/releases) page.
+The project can be downloaded as a binary for your system via the [GitHub Releases](https://github.com/Vylpes/random-bunny/releases) or [Gitea Releases](https://gitea.vylpes.xyz/RabbitLabs/random-bunny/releases) page.
 
 We currently support:
 
 - Linux (x64)
@@ -13,8 +13,6 @@ We currently support:
 The git repository can also be cloned and ran via `yarn build` and `yarn start`.
 
-You can produce the binary using the `yarn package` command. This creates the binaries in the `./bin` folder.
-
 > **NOTE:** We are aware of a bug in the macOS Arm64 builds failing to execute. For now you're still able to use the x64 builds under Rosetta fine. This will hopefully be fixed in a future release.
 
 ## Default Output
@@ -22,10 +20,9 @@ You can produce the binary using the `yarn package` command. This creates the bi
 By default, the command will fetch a random image from `r/rabbits` and return it in a human-readable output.
 
 ```
-$ random-bunny
+$ randombunny
 Archived = false
-Author = Rabbit_Owner
 Downvotes = 0
 Hidden = false
 Permalink = /r/Rabbits/comments/1av1rg9/cute_baby_bun/
@@ -41,11 +38,11 @@ Url = https://i.redd.it/sfz0srdrimjc1.png
 The command also includes a help option in case you are stuck.
 
 ```
-$ random-bunny --help
+$ randombunny --help
 # or
-$ random-bunny -h
+$ randombunny -h
 
 Usage: random-bunny [options]
@@ -56,10 +53,9 @@ Options:
 -s, --subreddit <subreddit> The subreddit to search (default: "rabbits")
 -j, --json Output as JSON
 -q, --query-metadata Include query metadata in result
--o <file> Output to file
 --sort <sort> Sort by (choices: "hot", "new", "top", default: "hot")
---limit <limit> The amount of posts to fetch from the reddit api (default: 100)
 -h, --help display help for command
+✨ Done in 0.32s.
 ```
 
 ## JSON output
@@ -67,13 +63,13 @@ Options:
 You can also convert the output into JSON, if you need to input it to another program.
 
 ```bash
-$ random-bunny --json
+$ randombunny --json
 # or
-$ randon-bunny -j
+$ randonbunny -j
 
-{"Archived":false,"Author":"Rabbit_Owner","Downs":0,"Hidden":false,"Permalink":"/r/Rabbits/comments/1av1rg9/cute_baby_bun/","Subreddit":"Rabbits","SubredditSubscribers":486085,"Title":"Cute baby bun","Ups":210,"Url":"https://i.redd.it/sfz0srdrimjc1.png"}
+{"Archived":false,"Downs":0,"Hidden":false,"Permalink":"/r/Rabbits/comments/1av1rg9/cute_baby_bun/","Subreddit":"Rabbits","SubredditSubscribers":486085,"Title":"Cute baby bun","Ups":210,"Url":"https://i.redd.it/sfz0srdrimjc1.png"}
 ```
 
 ## Sort
@@ -83,9 +79,9 @@ You can also choose the sorting option which reddit will use to return the avail
 This defaults to "hot". The valid options are "hot", "new", and "top".
 
 ```
-$ random-bunny --sort hot
-$ random-bunny --sort new
-$ random-bunny --sort top
+$ randombunny --sort hot
+$ randombunny --sort new
+$ randomBunny --sort top
 ```
@@ -96,28 +92,6 @@ You can change the subreddit which the command fetches from.
 This defaults to "rabbits"
 
 ```
-$ random-bunny --subreddit rabbits
-$ random-bunny -s horses
-```
-
-## Output to file
-
-If you'd rather send the output to a file, you can supply the `-o` flag.
-
-```
-$ randombunny -o ~/Desktop/output.txt
-```
-
-## Reddit API Return Limits
-
-You can also limit the amount the posts the script requests from the Reddit API
-using the `--limit` option.
-
-This defaults to 100. This accepts any number between 1 and 100.
-
-Please note limiting the calls to less than 100 will give a higher chance of
-the script not finding any valid image post to return.
-
-```
-$ random-bunny --limit 50
+$ randombunny --subreddit rabbits
+$ randombunny -s horses
 ```
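
The CLI documented above is driven programmatically by tests/cli.test.ts, added later in this diff; a condensed sketch of that pattern follows (the `dist/cli.js` path assumes the project has been built with `yarn build`):

```typescript
// Condensed from the approach in tests/cli.test.ts: shell out to the built CLI.
import { exec } from "child_process";
import path from "path";

function run(args: string[]): Promise<string> {
    return new Promise((resolve, reject) => {
        exec(`node ${path.resolve("./dist/cli.js")} ${args.join(" ")}`, (error, stdout, stderr) => {
            // A non-zero exit surfaces stderr, mirroring the error output documented above.
            if (error) {
                reject(stderr);
            } else {
                resolve(stdout);
            }
        });
    });
}

// For example, the "--sort" usage shown in the docs above.
run(["--sort", "top"]).then(output => console.log(output));
```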

View file

@ -1,54 +0,0 @@
import js from "@eslint/js";
import ts from "typescript-eslint";
export default [
{
ignores: [
"**/dist/",
"eslint.config.mjs",
"jest.config.cjs",
"jest.setup.js"
],
},
js.configs.recommended,
...ts.configs.recommended,
{
languageOptions: {
globals: {
exports: "writable",
module: "writable",
require: "writable",
process: "writable",
console: "writable",
jest: "writable",
},
ecmaVersion: 6,
sourceType: "script",
},
files: [
"./src",
"./tests"
],
rules: {
camelcase: "error",
"brace-style": ["error", "1tbs"],
"comma-dangle": ["error", "never"],
"comma-spacing": ["error", {
before: false,
after: true,
}],
"comma-style": ["error", "last"],
"arrow-body-style": ["error", "as-needed"],
"arrow-parens": ["error", "as-needed"],
"arrow-spacing": "error",
"no-var": "error",
"prefer-template": "error",
"prefer-const": "error",
},
}
];

View file

@@ -1,6 +1,6 @@
 {
     "name": "random-bunny",
-    "version": "2.3.0",
+    "version": "2.2.0",
     "description": "Get a random subreddit image url",
     "license": "MIT",
     "author": "Vylpes",
@@ -39,22 +39,19 @@
     "homepage": "https://gitea.vylpes.xyz/RabbitLabs/random-bunny",
     "funding": "https://ko-fi.com/vylpes",
     "devDependencies": {
-        "@eslint/eslintrc": "^3.1.0",
-        "@eslint/js": "^9.8.0",
-        "@types/eslint": "^9.6.0",
+        "@types/eslint": "^8.21.1",
         "@types/jest": "^29.5.8",
         "@types/node": "^20.0.0",
-        "@typescript-eslint/eslint-plugin": "^7.18.0",
-        "@typescript-eslint/parser": "^7.18.0",
-        "@yao-pkg/pkg": "^5.12.0",
-        "eslint": "^9.8.0",
+        "@typescript-eslint/eslint-plugin": "^7.0.0",
+        "@typescript-eslint/parser": "^5.54.0",
+        "eslint": "^8.49.0",
         "jest": "^29.7.0",
         "jest-mock-extended": "^3.0.3",
-        "np": "^10.0.0",
+        "np": "^9.0.0",
+        "pkg": "^5.8.1",
         "ts-jest": "^29.1.1",
         "ts-mockito": "^2.6.1",
-        "typescript": "^5.0.0",
-        "typescript-eslint": "^7.18.0"
+        "typescript": "^5.0.0"
     },
     "resolutions": {
         "np/**/got": "^14.0.0",

View file

@@ -33,11 +33,10 @@ console.log(result);
 ### `randomBunny()`
 
-Returns a `json string` for a random post. Accepts 3 arguments: `subreddit`, `sortby` ('new', 'hot', 'top'), and `limit` (1-100, default 100)
+Returns a `json string` for a random post. Accepts 2 arguments: `subreddit`, and `sortby` ('new', 'hot', 'top')
 
 The json string which gets returned consists of:
 - archived
-- author
 - downs
 - hidden
 - permalink
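
A minimal usage sketch for the two-argument signature documented on the head side of this hunk; the import name assumes the package is consumed as `random-bunny`, per package.json elsewhere in this diff:

```typescript
import randomBunny from "random-bunny";

async function main(): Promise<void> {
    // Two arguments on the head side: subreddit and sortby ('new', 'hot' or 'top').
    const response = await randomBunny("rabbits", "top");

    if (response.IsSuccess) {
        // Result carries the fields listed above (archived, downs, hidden, ...).
        console.log(response.Result!.Title, response.Result!.Url);
    } else {
        console.error(response.Error!.Message, response.Error!.Code);
    }
}

main();
```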

View file

@@ -1,5 +1,4 @@
 {
     "$schema": "https://docs.renovatebot.com/renovate-schema.json",
-    "baseBranches": ["develop"],
-    "labels": ["type/dependencies"]
+    "baseBranches": ["develop"]
 }

View file

@ -2,24 +2,55 @@ import { Command, Option } from "commander";
import randomBunny from "./index"; import randomBunny from "./index";
import ICliOptions from "./contracts/ICliOptions"; import ICliOptions from "./contracts/ICliOptions";
import { exit } from "process"; import { exit } from "process";
import CliHelper from "./helpers/cliHelper";
const program = new Command(); const program = new Command();
program program
.name('random-bunny') .name('random-bunny')
.description('Get a random image url from a subreddit of your choosing') .description('Get a random image url from a subreddit of your choosing')
.version('2.3') .version('2.2')
.option('-s, --subreddit <subreddit>', 'The subreddit to search', 'rabbits') .option('-s, --subreddit <subreddit>', 'The subreddit to search', 'rabbits')
.option('-j, --json', 'Output as JSON') .option('-j, --json', 'Output as JSON')
.option('-q, --query-metadata', 'Include query metadata in result') .option('-q, --query-metadata', 'Include query metadata in result')
.option('-o <file>', 'Output to file') .addOption(new Option('--sort <sort>', 'Sort by').default('hot').choices(['hot', 'new', 'top']));
.addOption(new Option('--sort <sort>', 'Sort by').default('hot').choices(['hot', 'new', 'top']))
.addOption(new Option('--limit <limit>', 'The amount of posts to fetch from the reddit api').default(100));
program.parse(); program.parse();
const options: ICliOptions = program.opts(); const options: ICliOptions = program.opts();
randomBunny(options.subreddit, options.sort, options.limit) randomBunny(options.subreddit, options.sort)
.then((response) => exit(CliHelper.Endpoint(response, options))); .then((response) => {
if (response.IsSuccess) {
const result = response.Result!;
const outputLines: string[] = [];
if (options.json) {
console.log(JSON.stringify(result));
return;
}
outputLines.push(`Archived = ${result.Archived}`);
outputLines.push(`Downvotes = ${result.Downs}`);
outputLines.push(`Hidden = ${result.Hidden}`);
outputLines.push(`Permalink = ${result.Permalink}`);
outputLines.push(`Subreddit = ${result.Subreddit}`);
outputLines.push(`Subreddit Subscribers = ${result.SubredditSubscribers}`);
outputLines.push(`Title = ${result.Title}`);
outputLines.push(`Upvotes = ${result.Ups}`);
outputLines.push(`Url = ${result.Url}`);
if (options.queryMetadata != null) {
outputLines.push(`Query.Subreddit = ${response.Query.subreddit}`);
outputLines.push(`Query.Sort By = ${response.Query.sortBy}`);
}
console.log(outputLines.join("\n"));
exit(0);
} else {
const error = response.Error!;
console.error(error.Message, error.Code);
exit(1);
}
});

View file

@@ -3,5 +3,4 @@ export enum ErrorCode {
     FailedToFetchReddit,
     UnableToParseJSON,
     NoImageResultsFound,
-    LimitOutOfRange,
 }

View file

@@ -2,5 +2,4 @@ export default class ErrorMessages {
     public static readonly FailedToFetchReddit = "Failed to fetch result from Reddit";
     public static readonly UnableToParseJSON = "Unable to parse the JSON result";
     public static readonly NoImageResultsFound = "No image results found in response from Reddit";
-    public static readonly LimitOutOfRange = "Limit must be a number between 1 and 100";
 }

View file

@@ -1,8 +1,6 @@
 export default interface ICliOptions {
     subreddit: string,
     json?: boolean,
-    sort: "new" | "hot" | "top",
-    o?: string,
-    limit: number,
+    sort: string,
     queryMetadata?: boolean,
 }

View file

@@ -1,7 +1,6 @@
 export default interface IFetchResult {
     data: {
         archived: boolean,
-        author: string,
         downs: number,
         hidden: boolean,
         permalink: string,

View file

@@ -1,6 +1,5 @@
 export default interface IRedditResult {
     Archived: boolean,
-    Author: string,
     Downs: number,
     Hidden: boolean,
     Permalink: string,

View file

@@ -1,5 +1,4 @@
 export default interface QueryResult {
     subreddit: string,
     sortBy: string,
-    limit: number,
 }

View file

@@ -1,25 +0,0 @@
import { writeFileSync } from "fs";
import ICliOptions from "../contracts/ICliOptions";
import IReturnResult from "../contracts/IReturnResult";
import OutputHelper from "./outputHelper";
export default class CliHelper {
public static Endpoint(response: IReturnResult, options: ICliOptions): number {
if (response.IsSuccess) {
const output = OutputHelper.GenerateOutput(response, options);
if (options.o) {
writeFileSync(options.o, `${output}\n`);
} else {
console.log(output);
}
return 0;
} else {
const error = response.Error!;
console.error(error.Message, error.Code);
return 1;
}
}
}

View file

@@ -1,33 +0,0 @@
import ICliOptions from "../contracts/ICliOptions";
import IReturnResult from "../contracts/IReturnResult";
export default class OutputHelper {
public static GenerateOutput(response: IReturnResult, options: ICliOptions): string {
const result = response.Result!;
const outputLines: string[] = [];
if (options.json) {
return JSON.stringify(result);
}
outputLines.push(`Archived = ${result.Archived}`);
outputLines.push(`Author = ${result.Author}`);
outputLines.push(`Downvotes = ${result.Downs}`);
outputLines.push(`Hidden = ${result.Hidden}`);
outputLines.push(`Permalink = ${result.Permalink}`);
outputLines.push(`Subreddit = ${result.Subreddit}`);
outputLines.push(`Subreddit Subscribers = ${result.SubredditSubscribers}`);
outputLines.push(`Title = ${result.Title}`);
outputLines.push(`Upvotes = ${result.Ups}`);
outputLines.push(`Url = ${result.Url}`);
if (options.queryMetadata != null) {
outputLines.push(`Query.Subreddit = ${response.Query.subreddit}`);
outputLines.push(`Query.Sort By = ${response.Query.sortBy}`);
outputLines.push(`Query.Limit = ${response.Query.limit}`);
}
return outputLines.join("\n");
}
}

View file

@@ -5,25 +5,18 @@ import { List } from 'linqts';
 import IFetchResult from "./contracts/IFetchResult";
 import { ErrorCode } from "./constants/ErrorCode";
 import ErrorMessages from "./constants/ErrorMessages";
-import ImageHelper from "./helpers/imageHelper";
+import ImageHelper from "./imageHelper";
 
-export default async function randomBunny(subreddit: string, sortBy: "new" | "hot" | "top" = 'hot', limit: number = 100): Promise<IReturnResult> {
-    if (limit < 1 || limit > 100) {
-        return {
-            IsSuccess: false,
-            Query: {
-                subreddit: subreddit,
-                sortBy: sortBy,
-                limit: limit,
-            },
-            Error: {
-                Code: ErrorCode.LimitOutOfRange,
-                Message: ErrorMessages.LimitOutOfRange,
-            }
-        };
-    }
-
-    const result = await fetch(`https://reddit.com/r/${subreddit}/${sortBy}.json?limit=${limit}`)
+const sortable = [
+    'new',
+    'hot',
+    'top'
+];
+
+export default async function randomBunny(subreddit: string, sortBy: string = 'hot'): Promise<IReturnResult> {
+    if (!sortable.includes(sortBy)) sortBy = 'hot';
+
+    const result = await fetch(`https://reddit.com/r/${subreddit}/${sortBy}.json?limit=100`)
     .then((res) => {
         return res;
     })
@@ -37,7 +30,6 @@ export default async function randomBunny(subreddit: string, sortBy: "new" | "ho
             Query: {
                 subreddit: subreddit,
                 sortBy: sortBy,
-                limit: limit,
             },
             Error: {
                 Code: ErrorCode.FailedToFetchReddit,
@@ -54,7 +46,6 @@ export default async function randomBunny(subreddit: string, sortBy: "new" | "ho
             Query: {
                 subreddit: subreddit,
                 sortBy: sortBy,
-                limit: limit,
             },
             Error: {
                 Code: ErrorCode.UnableToParseJSON,
@@ -77,7 +68,6 @@ export default async function randomBunny(subreddit: string, sortBy: "new" | "ho
            Query: {
                subreddit: subreddit,
                sortBy: sortBy,
-               limit: limit,
            },
            Error: {
                Code: ErrorCode.NoImageResultsFound,
@@ -103,7 +93,6 @@ export default async function randomBunny(subreddit: string, sortBy: "new" | "ho
            Query: {
                subreddit: subreddit,
                sortBy: sortBy,
-               limit: limit,
            },
            Error: {
                Code: ErrorCode.NoImageResultsFound,
@@ -118,7 +107,6 @@ export default async function randomBunny(subreddit: string, sortBy: "new" | "ho
     }
 
     const redditResult: IRedditResult = {
-        Author: randomData['author'],
         Archived: randomData['archived'],
         Downs: randomData['downs'],
         Hidden: randomData['hidden'],
@@ -135,7 +123,6 @@ export default async function randomBunny(subreddit: string, sortBy: "new" | "ho
         Query: {
             subreddit: subreddit,
             sortBy: sortBy,
-            limit: limit,
         },
         Result: redditResult
     };
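
A short sketch (assuming the module is importable as `random-bunny`) of the behavioural difference this hunk introduces: the head side silently coerces an unknown sort value to 'hot', whereas the base side's removed limit check returned a LimitOutOfRange error instead.

```typescript
import randomBunny from "random-bunny";

async function demo(): Promise<void> {
    // Head side: 'invalid' is not in the sortable whitelist, so it is coerced to 'hot'.
    const coerced = await randomBunny("rabbits", "invalid");
    console.log(coerced.Query.sortBy); // expected to be "hot"

    // Base side (removed above): randomBunny("rabbits", "hot", 0) would return
    // IsSuccess === false with ErrorCode.LimitOutOfRange instead of fetching.
}

demo();
```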

187
tests/cli.test.ts Normal file
View file

@@ -0,0 +1,187 @@
import { exec } from "child_process";
import path from "path";
describe('default', () => {
test('GIVEN no options are supplied, EXPECT standard output', async () => {
const result = await cli([], '.');
const keys = result.stdout.split('\n')
.flatMap(x => x.split(' = ')[0])
.filter(x => x && x.length > 0);
const values = result.stdout.split('\n')
.flatMap(x => x.split(' = ')[1])
.filter(x => x && x.length > 0);
expect(result.code).toBe(0);
expect(keys).toStrictEqual(['Archived', 'Downvotes', 'Hidden', 'Permalink', 'Subreddit', 'Subreddit Subscribers', 'Title', 'Upvotes', 'Url']);
expect(values.length).toBe(9);
}, 5000);
test('GIVEN an error occurs, EXPECT error output', async () => {
const result = await cli(['-s', 'textonly'], '.');
expect(result.code).toBe(1);
expect(result.stderr).toBeDefined();
}, 5000);
});
describe('version', () => {
test('GIVEN -V flag is supplied, EXPECT version returned', async () => {
const result = await cli(['-V'], '.');
expect(result.code).toBe(0);
expect(result.stdout).toBe('2.2\n');
});
test('GIVEN --version is supplied, EXPECT version returned', async () => {
const result = await cli(['--version'], '.');
expect(result.code).toBe(0);
expect(result.stdout).toBe('2.2\n');
});
});
describe('help', () => {
test('GIVEN -h is supplied, EXPECT help returned', async () => {
const result = await cli(['-h'], '.');
expect(result.code).toBe(0);
expect(result.stdout.split('\n')[0]).toBe('Usage: random-bunny [options]');
});
test('GIVEN --help is supplied, EXPECT help returned', async () => {
const result = await cli(['--help'], '.');
expect(result.code).toBe(0);
expect(result.stdout.split('\n')[0]).toBe('Usage: random-bunny [options]');
});
});
describe('subreddit', () => {
test('GIVEN -s is not supplied, EXPECT subreddit to be defaulted', async () => {
const result = await cli([], '.');
const subreddit = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0] == 'Subreddit')!
.split(' = ')[1];
expect(subreddit).toBe('Rabbits');
}, 5000);
test('GIVEN -s is supplied, EXPECT subreddit to be changed', async () => {
const result = await cli(['-s', 'pics'], '.');
const subreddit = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0] == 'Subreddit')!
.split(' = ')[1];
expect(subreddit).toBe('pics');
}, 5000);
test('GIVEN --subreddit is supplied, EXPECT subreddit to be changed', async () => {
const result = await cli(['--subreddit', 'pics'], '.');
const subreddit = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0] == 'Subreddit')!
.split(' = ')[1];
expect(subreddit).toBe('pics');
}, 5000);
});
describe('json', () => {
test('GIVEN -j is supplied, EXPECT output to be valid JSON', async () => {
const result = await cli(['-j'], '.');
const json = JSON.parse(result.stdout);
expect(json).toBeDefined();
}, 5000);
test('GIVEN --json is supplied, EXPECT output to be valid JSON', async () => {
const result = await cli(['--json'], '.');
const json = JSON.parse(result.stdout);
expect(json).toBeDefined();
}, 5000);
});
describe('sort', () => {
test('GIVEN --sort is not supplied, EXPECT sort to be defaulted', async () => {
const result = await cli(['-q'], '.');
const sortBy = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0] == 'Query.Sort By')!
.split(' = ')[1];
expect(sortBy).toBe('hot');
}, 5000);
test('GIVEN --sort is supplied WITH a valid input, EXPECT sort to be used', async () => {
const result = await cli(['-q', '--sort', 'new'], '.');
const sortBy = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0] == 'Query.Sort By')!
.split(' = ')[1];
expect(sortBy).toBe('new');
}, 5000);
test('GIVEN --sort is supplied WITH an invalid input, EXPECT error', async () => {
const result = await cli(['-q', '--sort', 'invalid'], '.');
expect(result.code).toBe(1);
expect(result.stderr).toBe("error: option '--sort <sort>' argument 'invalid' is invalid. Allowed choices are hot, new, top.\n");
}, 5000);
});
describe('query-metadata', () => {
test('GIVEN --query-metadata is not supplied, EXPECT no query metadata returned', async () => {
const result = await cli([], '.');
const query = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0].startsWith('Query'));
expect(query).toBeUndefined();
}, 5000);
test('GIVEN --query-metadata is supplied, EXPECT query metadata returned', async () => {
const result = await cli(['--query-metadata'], '.');
const query = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0].startsWith('Query'));
expect(query).toBeDefined();
}, 5000);
test('GIVEN -q is supplied, EXPECT query metadata returned', async () => {
const result = await cli(['-q'], '.');
const query = result.stdout.split('\n')
.find(x => x && x.length > 0 && x.split(' = ')[0].startsWith('Query'));
expect(query).toBeDefined();
}, 5000);
});
function cli(args: string[], cwd: string): Promise<cliResult> {
return new Promise(resolve => {
exec(`node ${path.resolve('./dist/cli.js')} ${args.join(' ')}`,
{ cwd },
(error, stdout, stderr) => { resolve({
code: error && error.code ? error.code : 0,
error,
stdout,
stderr });
});
});
}
interface cliResult {
code: number,
error: any,
stdout: string,
stderr: string,
}

View file

@@ -1,32 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`GenerateOutput EXPECT standout output to be returned 1`] = `
"Archived = false
Author = author
Downvotes = 0
Hidden = false
Permalink = /r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/
Subreddit = Rabbits
Subreddit Subscribers = 654751
Title = This is my Ms Bear!
Upvotes = 17
Url = https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d"
`;
exports[`GenerateOutput GIVEN options.json is true, EXPECT output to be returned as JSON 1`] = `"{"Archived":false,"Author":"author","Downs":0,"Hidden":false,"Permalink":"/r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/","Subreddit":"Rabbits","SubredditSubscribers":654751,"Title":"This is my Ms Bear!","Ups":17,"Url":"https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d"}"`;
exports[`GenerateOutput GIVEN options.queryMetadata is supplied, EXPECT query metadata to be added 1`] = `
"Archived = false
Author = author
Downvotes = 0
Hidden = false
Permalink = /r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/
Subreddit = Rabbits
Subreddit Subscribers = 654751
Title = This is my Ms Bear!
Upvotes = 17
Url = https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d
Query.Subreddit = rabbits
Query.Sort By = hot
Query.Limit = 100"
`;

View file

@@ -1,118 +0,0 @@
import fs from "fs";
import CliHelper from "../../src/helpers/cliHelper";
import ICliOptions from "../../src/contracts/ICliOptions";
import IReturnResult from "../../src/contracts/IReturnResult";
import OutputHelper from "../../src/helpers/outputHelper";
import { ErrorCode } from "../../src/constants/ErrorCode";
describe("Endpoint", () => {
describe("GIVEN response is successful", () => {
test("GIVEN options.o is defined, EXPECT output written to file", () => {
// Arrange
const response = {
IsSuccess: true,
} as IReturnResult;
const options = {
o: "file.txt",
} as ICliOptions;
OutputHelper.GenerateOutput = jest.fn().mockReturnValue("test output");
fs.writeFileSync = jest.fn();
console.log = jest.fn();
console.error = jest.fn();
// Act
const result = CliHelper.Endpoint(response, options);
// Assert
expect(result).toBe(0);
expect(OutputHelper.GenerateOutput).toHaveBeenCalledTimes(1);
expect(OutputHelper.GenerateOutput).toHaveBeenCalledWith(response, options);
expect(fs.writeFileSync).toHaveBeenCalledTimes(1);
expect(fs.writeFileSync).toHaveBeenCalledWith("file.txt", "test output\n");
expect(console.log).not.toHaveBeenCalled();
expect(console.error).not.toHaveBeenCalled();
});
test("GIVEN options.o is undefined, EXPECT output logged to console", () => {
// Arrange
const response = {
IsSuccess: true,
} as IReturnResult;
const options = {
o: undefined,
} as ICliOptions;
OutputHelper.GenerateOutput = jest.fn().mockReturnValue("test output");
fs.writeFileSync = jest.fn();
console.log = jest.fn();
console.error = jest.fn();
// Act
const result = CliHelper.Endpoint(response, options);
// Assert
expect(result).toBe(0);
expect(OutputHelper.GenerateOutput).toHaveBeenCalledTimes(1);
expect(OutputHelper.GenerateOutput).toHaveBeenCalledWith(response, options);
expect(fs.writeFileSync).not.toHaveBeenCalled();
expect(console.log).toHaveBeenCalledTimes(1);
expect(console.log).toHaveBeenCalledWith("test output");
expect(console.error).not.toHaveBeenCalled();
});
});
test("GIVEN response is failure, EXPECT error logged to console", () => {
// Arrange
const response = {
IsSuccess: false,
Error: {
Message: "error message",
Code: ErrorCode.FailedToFetchReddit,
},
} as IReturnResult;
const options = {
o: "file.txt",
} as ICliOptions;
OutputHelper.GenerateOutput = jest.fn().mockReturnValue("test output");
fs.writeFileSync = jest.fn();
console.log = jest.fn();
console.error = jest.fn();
// Act
const result = CliHelper.Endpoint(response, options);
// Assert
expect(result).toBe(1);
expect(OutputHelper.GenerateOutput).not.toHaveBeenCalled();
expect(fs.writeFileSync).not.toHaveBeenCalled();
expect(console.log).not.toHaveBeenCalled();
expect(console.error).toHaveBeenCalledTimes(1);
expect(console.error).toHaveBeenCalledWith("error message", ErrorCode.FailedToFetchReddit);
});
});

View file

@@ -1,105 +0,0 @@
import ICliOptions from "../../src/contracts/ICliOptions";
import IReturnResult from "../../src/contracts/IReturnResult";
import OutputHelper from "../../src/helpers/outputHelper";
describe("GenerateOutput", () => {
test("EXPECT standout output to be returned", () => {
// Arrange
const response = {
IsSuccess: true,
Query: {
subreddit: "rabbits",
sortBy: "hot",
limit: 100,
},
Result: {
Archived: false,
Author: 'author',
Downs: 0,
Hidden: false,
Permalink: "/r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/",
Subreddit: "Rabbits",
SubredditSubscribers: 654751,
Title: "This is my Ms Bear!",
Ups: 17,
Url: "https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d",
},
} as IReturnResult;
const options = {} as ICliOptions;
// Act
const result = OutputHelper.GenerateOutput(response, options);
// Assert
expect(result).toMatchSnapshot();
});
test("GIVEN options.json is true, EXPECT output to be returned as JSON", () => {
// Arrange
const response = {
IsSuccess: true,
Query: {
subreddit: "rabbits",
sortBy: "hot",
limit: 100,
},
Result: {
Archived: false,
Author: 'author',
Downs: 0,
Hidden: false,
Permalink: "/r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/",
Subreddit: "Rabbits",
SubredditSubscribers: 654751,
Title: "This is my Ms Bear!",
Ups: 17,
Url: "https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d",
},
} as IReturnResult;
const options = {
json: true,
} as ICliOptions;
// Act
const result = OutputHelper.GenerateOutput(response, options);
// Assert
expect(result).toMatchSnapshot();
});
test("GIVEN options.queryMetadata is supplied, EXPECT query metadata to be added", () => {
// Arrange
const response = {
IsSuccess: true,
Query: {
subreddit: "rabbits",
sortBy: "hot",
limit: 100,
},
Result: {
Archived: false,
Author: 'author',
Downs: 0,
Hidden: false,
Permalink: "/r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/",
Subreddit: "Rabbits",
SubredditSubscribers: 654751,
Title: "This is my Ms Bear!",
Ups: 17,
Url: "https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d",
},
} as IReturnResult;
const options = {
queryMetadata: true,
} as ICliOptions;
// Act
const result = OutputHelper.GenerateOutput(response, options);
// Assert
expect(result).toMatchSnapshot();
});
});

View file

@@ -1,4 +1,4 @@
-import ImageHelper from "../../src/helpers/imageHelper";
+import ImageHelper from "../src/imageHelper";
 import fetch from "got-cjs";
 
 jest.mock('got-cjs');

View file

@@ -1,16 +1,12 @@
 import { ErrorCode } from "../src/constants/ErrorCode";
 import ErrorMessages from "../src/constants/ErrorMessages";
-import ImageHelper from "../src/helpers/imageHelper";
+import ImageHelper from "../src/imageHelper";
 import randomBunny from "../src/index";
 import fetch from "got-cjs";
 
 jest.mock('got-cjs');
 const fetchMock = jest.mocked(fetch);
 
-beforeEach(() => {
-    fetchMock.mockReset();
-});
-
 describe('randomBunny', () => {
     test('GIVEN subreddit AND sortBy is supplied, EXPECT successful result', async() => {
         fetchMock.mockResolvedValue({
@@ -20,7 +16,6 @@ describe('randomBunny', () => {
                     {
                         data: {
                             archived: false,
-                            author: 'author',
                             downs: 0,
                             hidden: false,
                             permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
@@ -42,7 +37,7 @@ describe('randomBunny', () => {
         expect(result.Result).toBeDefined();
         expect(result.Error).toBeUndefined();
 
-        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
+        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
     });
 
     test('GIVEN sortBy is NOT supplied, expect it to default to hot', async () => {
@@ -53,7 +48,6 @@ describe('randomBunny', () => {
                     {
                         data: {
                             archived: false,
-                            author: 'author',
                             downs: 0,
                             hidden: false,
                             permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
@@ -75,7 +69,39 @@ describe('randomBunny', () => {
         expect(result.Result).toBeDefined();
         expect(result.Error).toBeUndefined();
 
-        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/hot.json?limit=100');
+        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/hot.json?limit=100');
+    });
+
+    test('GIVEN sortBy is NOT valid, expect it to default to hot', async () => {
+        fetchMock.mockResolvedValue({
+            body: JSON.stringify({
+                data: {
+                    children: [
+                        {
+                            data: {
+                                archived: false,
+                                downs: 0,
+                                hidden: false,
+                                permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
+                                subreddit: 'Rabbits',
+                                subreddit_subscribers: 298713,
+                                title: 'Someone told pickles its Monday… *internal fury*',
+                                ups: 1208,
+                                url: 'https://i.redd.it/cr8xudsnkgua1.jpg',
+                            },
+                        },
+                    ],
+                }
+            }),
+        });
+
+        const result = await randomBunny('rabbits', 'invalid');
+
+        expect(result.IsSuccess).toBeTruthy();
+        expect(result.Result).toBeDefined();
+        expect(result.Error).toBeUndefined();
+
+        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/hot.json?limit=100');
     });
 
     test('GIVEN the fetch fails, EXPECT failure result', async () => {
@@ -90,7 +116,7 @@ describe('randomBunny', () => {
         expect(result.Error!.Code).toBe(ErrorCode.FailedToFetchReddit);
         expect(result.Error!.Message).toBe(ErrorMessages.FailedToFetchReddit);
 
-        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
+        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
     });
 
     test('GIVEN the result is NOT valid JSON, EXPECT failure result', async () => {
@@ -107,7 +133,7 @@ describe('randomBunny', () => {
         expect(result.Error!.Code).toBe(ErrorCode.UnableToParseJSON);
         expect(result.Error!.Message).toBe(ErrorMessages.UnableToParseJSON);
 
-        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
+        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
     });
 
     test('GIVEN randomSelect does NOT find a response, EXPECT failure result', async () => {
@@ -128,7 +154,7 @@ describe('randomBunny', () => {
         expect(result.Error!.Code).toBe(ErrorCode.NoImageResultsFound);
         expect(result.Error!.Message).toBe(ErrorMessages.NoImageResultsFound);
 
-        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
+        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
     });
 
     test('GIVEN randomSelect does NOT find a valid response, EXPECT failure result', async () => {
@@ -139,7 +165,6 @@ describe('randomBunny', () => {
                     {
                         data: {
                             archived: false,
-                            author: 'author',
                             downs: 0,
                             hidden: false,
                             permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
@@ -164,8 +189,8 @@ describe('randomBunny', () => {
         expect(result.Error!.Code).toBe(ErrorCode.NoImageResultsFound);
         expect(result.Error!.Message).toBe(ErrorMessages.NoImageResultsFound);
 
-        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
-        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
+        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
+        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
     });
 
     test("GIVEN data fetched is a gallery AND an image is returned from the helper, EXPECT this to be used", async () => {
@@ -176,7 +201,6 @@ describe('randomBunny', () => {
                    {
                        data: {
                            archived: false,
-                            author: 'author',
                            downs: 0,
                            hidden: false,
                            permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
@@ -199,7 +223,7 @@ describe('randomBunny', () => {
         expect(result.IsSuccess).toBeTruthy();
         expect(result.Result).toBeDefined();
 
-        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
+        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
 
         expect(ImageHelper.FetchImageFromRedditGallery).toHaveBeenCalledTimes(1);
         expect(ImageHelper.FetchImageFromRedditGallery).toHaveBeenCalledWith("https://i.redd.it/gallery/cr8xudsnkgua1");
@@ -213,7 +237,6 @@ describe('randomBunny', () => {
                    {
                        data: {
                            archived: false,
-                            author: 'author',
                            downs: 0,
                            hidden: false,
                            permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
@@ -240,109 +263,4 @@ describe('randomBunny', () => {
         expect(result.Error?.Code).toBe(ErrorCode.NoImageResultsFound);
         expect(result.Error?.Message).toBe(ErrorMessages.NoImageResultsFound);
     });
test("GIVEN limit is supplied, EXPECT limit sent to the API", async () => {
fetchMock.mockResolvedValue({
body: JSON.stringify({
data: {
children: [
{
data: {
archived: false,
author: 'author',
downs: 0,
hidden: false,
permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
subreddit: 'Rabbits',
subreddit_subscribers: 298713,
title: 'Someone told pickles its Monday… *internal fury*',
ups: 1208,
url: 'https://i.redd.it/cr8xudsnkgua1.jpg',
},
},
],
}
}),
});
const result = await randomBunny('rabbits', 'new', 50);
expect(result.IsSuccess).toBeTruthy();
expect(result.Result).toBeDefined();
expect(result.Error).toBeUndefined();
expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=50');
});
test("GIVEN limit is less than 1, EXPECT error to be returned", async () => {
fetchMock.mockResolvedValue({
body: JSON.stringify({
data: {
children: [
{
data: {
archived: false,
author: 'author',
downs: 0,
hidden: false,
permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
subreddit: 'Rabbits',
subreddit_subscribers: 298713,
title: 'Someone told pickles its Monday… *internal fury*',
ups: 1208,
url: 'https://i.redd.it/cr8xudsnkgua1.jpg',
},
},
],
}
}),
});
const result = await randomBunny('rabbits', 'new', 0);
expect(result.IsSuccess).toBeFalsy();
expect(result.Result).toBeUndefined();
expect(result.Error).toBeDefined();
expect(result.Error!.Code).toBe(ErrorCode.LimitOutOfRange);
expect(result.Error!.Message).toBe(ErrorMessages.LimitOutOfRange);
expect(fetchMock).not.toHaveBeenCalled();
});
test("GIVEN limit is greater than 100, EXPECT error to be returned", async () => {
fetchMock.mockResolvedValue({
body: JSON.stringify({
data: {
children: [
{
data: {
archived: false,
author: 'author',
downs: 0,
hidden: false,
permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
subreddit: 'Rabbits',
subreddit_subscribers: 298713,
title: 'Someone told pickles its Monday… *internal fury*',
ups: 1208,
url: 'https://i.redd.it/cr8xudsnkgua1.jpg',
},
},
],
}
}),
});
const result = await randomBunny('rabbits', 'new', 101);
expect(result.IsSuccess).toBeFalsy();
expect(result.Result).toBeUndefined();
expect(result.Error).toBeDefined();
expect(result.Error!.Code).toBe(ErrorCode.LimitOutOfRange);
expect(result.Error!.Message).toBe(ErrorMessages.LimitOutOfRange);
expect(fetchMock).not.toHaveBeenCalled();
});
});

1401
yarn.lock

File diff suppressed because it is too large.