Compare commits
1 commit
202f8c7d52...f2e3102887

| Author | SHA1 | Date |
|---|---|---|
|  | f2e3102887 |  |

19 changed files with 816 additions and 926 deletions

@@ -1,36 +0,0 @@
name: Publish to NPM

on:
  push:
    tags:
      - '*'

jobs:
  build:

    runs-on: node

    steps:
      - uses: actions/checkout@v4
      - name: Use Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 20.x
      - run: yarn install --frozen-lockfile
      - run: yarn build
      - run: yarn test
      - run: yarn lint

  publish:
    needs: build
    runs-on: node
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v3
        with:
          node-version: 20.x
      - run: yarn install --frozen-lockfile
      - run: yarn build
      - uses: https://github.com/JS-DevTools/npm-publish@v3
        with:
          token: ${{ secrets.NPM_TOKEN }}
@@ -1,26 +0,0 @@
name: Test

on:
  push:
    branches:
      - feature/*
      - hotfix/*
      - renovate/*
      - dependabot/*
      - develop

jobs:
  build:

    runs-on: node

    steps:
      - uses: actions/checkout@v4
      - name: Use Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 20.x
      - run: yarn install --frozen-lockfile
      - run: yarn build
      - run: yarn test
      - run: yarn lint
@@ -27,12 +27,6 @@ steps:
  package_prod:
    image: node
    commands:
      - apt install unzip -y
      - wget https://github.com/xerub/ldid/releases/download/42/ldid.zip
      - unzip ldid.zip
      - mkdir /opt/ldid/
      - cp linux64/ldid /opt/ldid/
      - export PATH=/opt/ldid:$PATH
      - yarn package
    when:
      event: tag
@@ -44,7 +38,7 @@ steps:
      - apk add rsync openssh-client
      - eval `ssh-agent -s`
      - echo "$SSH_KEY" | tr -d '\r' | ssh-add -
      - rsync -e "ssh -o StrictHostKeyChecking=no" -r ./bin vylpes@192.168.1.115:/home/vylpes/apps/random-bunny/random-bunny_prod
      - rsync -e "ssh -o StrictHostKeyChecking=no" -r ./bin vylpes@192.168.68.120:/home/vylpes/apps/random-bunny/random-bunny_prod
    when:
      event: tag
      branch: main
@@ -53,12 +47,6 @@ steps:
  package_stage:
    image: node
    commands:
      - apt install unzip -y
      - wget https://github.com/xerub/ldid/releases/download/42/ldid.zip
      - unzip ldid.zip
      - mkdir /opt/ldid/
      - cp linux64/ldid /opt/ldid/
      - export PATH=/opt/ldid:$PATH
      - yarn package
    when:
      event: push
@@ -70,7 +58,7 @@ steps:
      - apk add rsync openssh-client
      - eval `ssh-agent -s`
      - echo "$SSH_KEY" | tr -d '\r' | ssh-add -
      - rsync -e "ssh -o StrictHostKeyChecking=no" -r ./bin vylpes@192.168.1.115:/home/vylpes/apps/random-bunny/random-bunny_stage
      - rsync -e "ssh -o StrictHostKeyChecking=no" -r ./bin vylpes@192.168.68.120:/home/vylpes/apps/random-bunny/random-bunny_stage
    when:
      event: push
      branch: develop
@@ -89,4 +77,4 @@ steps:
        from_secret: NPM_TOKEN
    when:
      event: tag
      branch: main
      branch: main

23 docs/cli.md
@@ -2,19 +2,6 @@

Since version 2.2, Random Bunny has included a command line interface (CLI).

## Downloads

The project can be downloaded as a binary for your system via the [GitHub Releases](https://github.com/Vylpes/random-bunny/releases) or [Gitea Releases](https://gitea.vylpes.xyz/RabbitLabs/random-bunny/releases) page.

We currently support:
- Linux (x64)
- Windows (x64)
- macOS (x64, Arm64\*)

The git repository can also be cloned and run via `yarn build` and `yarn start`.

> **NOTE:** We are aware of a bug where the macOS Arm64 builds fail to execute. For now, you can still use the x64 builds under Rosetta. This will hopefully be fixed in a future release.

## Default Output

By default, the command will fetch a random image from `r/rabbits` and return it in a human-readable format.
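For illustration only (not part of the diff above), a default run prints one `Key = Value` pair per line. The sketch below reuses the values from the test snapshot removed later in this change; the actual post, counts, and URL will differ on every fetch:

```
$ randombunny
Archived = false
Downvotes = 0
Hidden = false
Permalink = /r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/
Subreddit = Rabbits
Subreddit Subscribers = 654751
Title = This is my Ms Bear!
Upvotes = 17
Url = https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d
```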
@@ -53,9 +40,9 @@ Options:
  -s, --subreddit <subreddit>  The subreddit to search (default: "rabbits")
  -j, --json                   Output as JSON
  -q, --query-metadata         Include query metadata in result
  -o <file>                    Output to file
  --sort <sort>                Sort by (choices: "hot", "new", "top", default: "hot")
  -h, --help                   display help for command
✨ Done in 0.32s.
```

## JSON output

@@ -95,11 +82,3 @@ This defaults to "rabbits"
$ randombunny --subreddit rabbits
$ randombunny -s horses
```

## Output to file

If you'd rather send the output to a file, you can supply the `-o` flag.

```
$ randombunny -o ~/Desktop/output.txt
```

@@ -30,7 +30,7 @@
    "test": "jest",
    "lint": "eslint .",
    "release": "np --no-publish",
    "package": "pkg . --no-bytecode"
    "package": "pkg ."
  },
  "bugs": {
    "url": "https://gitea.vylpes.xyz/RabbitLabs/random-bunny/issues",
@@ -43,11 +43,11 @@
    "@types/jest": "^29.5.8",
    "@types/node": "^20.0.0",
    "@typescript-eslint/eslint-plugin": "^7.0.0",
    "@typescript-eslint/parser": "^7.0.0",
    "@typescript-eslint/parser": "^5.54.0",
    "eslint": "^9.0.0",
    "jest": "^29.7.0",
    "jest-mock-extended": "^3.0.3",
    "np": "^10.0.0",
    "np": "^9.0.0",
    "pkg": "^5.8.1",
    "ts-jest": "^29.1.1",
    "ts-mockito": "^2.6.1",

@@ -14,10 +14,6 @@ or if you're using yarn
yarn add random-bunny
```

### Binary

The project can be downloaded as a binary for your system via the [GitHub Releases](https://github.com/Vylpes/random-bunny/releases) or [Gitea Releases](https://gitea.vylpes.xyz/RabbitLabs/random-bunny/releases) page.


## Usage

@@ -52,7 +48,7 @@ The json string which gets returned consists of:

Random bunny can also be used as a CLI. This is accessible via the executable (see git releases) or via `src/cli.ts`.

For more details, see the [documentation](https://docs.vylpes.xyz/books/random-bunny).
For more details, see the documentation.

## Notes

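As a rough sketch of that flow (an illustration, assuming the project has been built with `yarn build` so the compiled entry point exists at `dist/cli.js`, as in the CLI tests added in this change):

```
$ yarn build
$ node dist/cli.js --subreddit rabbits --sort new --json
```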

38 src/cli.ts
@@ -2,7 +2,6 @@ import { Command, Option } from "commander";
import randomBunny from "./index";
import ICliOptions from "./contracts/ICliOptions";
import { exit } from "process";
import CliHelper from "./helpers/cliHelper";

const program = new Command();

@@ -13,7 +12,6 @@ program
    .option('-s, --subreddit <subreddit>', 'The subreddit to search', 'rabbits')
    .option('-j, --json', 'Output as JSON')
    .option('-q, --query-metadata', 'Include query metadata in result')
    .option('-o <file>', 'Output to file')
    .addOption(new Option('--sort <sort>', 'Sort by').default('hot').choices(['hot', 'new', 'top']));

program.parse();
@@ -21,4 +19,38 @@ program.parse();
const options: ICliOptions = program.opts();

randomBunny(options.subreddit, options.sort)
    .then((response) => exit(CliHelper.Endpoint(response, options)));
    .then((response) => {
        if (response.IsSuccess) {
            const result = response.Result!;

            const outputLines: string[] = [];

            if (options.json) {
                console.log(JSON.stringify(result));
                return;
            }

            outputLines.push(`Archived = ${result.Archived}`);
            outputLines.push(`Downvotes = ${result.Downs}`);
            outputLines.push(`Hidden = ${result.Hidden}`);
            outputLines.push(`Permalink = ${result.Permalink}`);
            outputLines.push(`Subreddit = ${result.Subreddit}`);
            outputLines.push(`Subreddit Subscribers = ${result.SubredditSubscribers}`);
            outputLines.push(`Title = ${result.Title}`);
            outputLines.push(`Upvotes = ${result.Ups}`);
            outputLines.push(`Url = ${result.Url}`);

            if (options.queryMetadata != null) {
                outputLines.push(`Query.Subreddit = ${response.Query.subreddit}`);
                outputLines.push(`Query.Sort By = ${response.Query.sortBy}`);
            }

            console.log(outputLines.join("\n"));
            exit(0);
        } else {
            const error = response.Error!;

            console.error(error.Message, error.Code);
            exit(1);
        }
    });
@@ -1,7 +1,6 @@
export default interface ICliOptions {
    subreddit: string,
    json?: boolean,
    sort: "new" | "hot" | "top",
    o?: string,
    sort: string,
    queryMetadata?: boolean,
}
@@ -1,25 +0,0 @@
import { writeFileSync } from "fs";
import ICliOptions from "../contracts/ICliOptions";
import IReturnResult from "../contracts/IReturnResult";
import OutputHelper from "./outputHelper";

export default class CliHelper {
    public static Endpoint(response: IReturnResult, options: ICliOptions): number {
        if (response.IsSuccess) {
            const output = OutputHelper.GenerateOutput(response, options);

            if (options.o) {
                writeFileSync(options.o, output);
            } else {
                console.log(output);
            }

            return 0;
        } else {
            const error = response.Error!;

            console.error(error.Message, error.Code);
            return 1;
        }
    }
}
@@ -1,31 +0,0 @@
import ICliOptions from "../contracts/ICliOptions";
import IReturnResult from "../contracts/IReturnResult";

export default class OutputHelper {
    public static GenerateOutput(response: IReturnResult, options: ICliOptions): string {
        const result = response.Result!;

        const outputLines: string[] = [];

        if (options.json) {
            return JSON.stringify(result);
        }

        outputLines.push(`Archived = ${result.Archived}`);
        outputLines.push(`Downvotes = ${result.Downs}`);
        outputLines.push(`Hidden = ${result.Hidden}`);
        outputLines.push(`Permalink = ${result.Permalink}`);
        outputLines.push(`Subreddit = ${result.Subreddit}`);
        outputLines.push(`Subreddit Subscribers = ${result.SubredditSubscribers}`);
        outputLines.push(`Title = ${result.Title}`);
        outputLines.push(`Upvotes = ${result.Ups}`);
        outputLines.push(`Url = ${result.Url}`);

        if (options.queryMetadata != null) {
            outputLines.push(`Query.Subreddit = ${response.Query.subreddit}`);
            outputLines.push(`Query.Sort By = ${response.Query.sortBy}`);
        }

        return outputLines.join("\n");
    }
}

12 src/index.ts
@@ -5,9 +5,17 @@ import { List } from 'linqts';
import IFetchResult from "./contracts/IFetchResult";
import { ErrorCode } from "./constants/ErrorCode";
import ErrorMessages from "./constants/ErrorMessages";
import ImageHelper from "./helpers/imageHelper";
import ImageHelper from "./imageHelper";

const sortable = [
    'new',
    'hot',
    'top'
];

export default async function randomBunny(subreddit: string, sortBy: string = 'hot'): Promise<IReturnResult> {
    if (!sortable.includes(sortBy)) sortBy = 'hot';

export default async function randomBunny(subreddit: string, sortBy: "new" | "hot" | "top" = 'hot'): Promise<IReturnResult> {
    const result = await fetch(`https://reddit.com/r/${subreddit}/${sortBy}.json?limit=100`)
        .then((res) => {
            return res;

187 tests/cli.test.ts Normal file
@@ -0,0 +1,187 @@
import { exec } from "child_process";
import path from "path";

describe('default', () => {
    test('GIVEN no options are supplied, EXPECT standard output', async () => {
        const result = await cli([], '.');

        const keys = result.stdout.split('\n')
            .flatMap(x => x.split(' = ')[0])
            .filter(x => x && x.length > 0);
        const values = result.stdout.split('\n')
            .flatMap(x => x.split(' = ')[1])
            .filter(x => x && x.length > 0);

        expect(result.code).toBe(0);
        expect(keys).toStrictEqual(['Archived', 'Downvotes', 'Hidden', 'Permalink', 'Subreddit', 'Subreddit Subscribers', 'Title', 'Upvotes', 'Url']);
        expect(values.length).toBe(9);
    }, 5000);

    test('GIVEN an error occurs, EXPECT error output', async () => {
        const result = await cli(['-s', 'textonly'], '.');

        expect(result.code).toBe(1);
        expect(result.stderr).toBeDefined();
    }, 5000);
});

describe('version', () => {
    test('GIVEN -V flag is supplied, EXPECT version returned', async () => {
        const result = await cli(['-V'], '.');

        expect(result.code).toBe(0);
        expect(result.stdout).toBe('2.2\n');
    });

    test('GIVEN --version is supplied, EXPECT version returned', async () => {
        const result = await cli(['--version'], '.');

        expect(result.code).toBe(0);
        expect(result.stdout).toBe('2.2\n');
    });
});

describe('help', () => {
    test('GIVEN -h is supplied, EXPECT help returned', async () => {
        const result = await cli(['-h'], '.');

        expect(result.code).toBe(0);
        expect(result.stdout.split('\n')[0]).toBe('Usage: random-bunny [options]');
    });

    test('GIVEN --help is supplied, EXPECT help returned', async () => {
        const result = await cli(['--help'], '.');

        expect(result.code).toBe(0);
        expect(result.stdout.split('\n')[0]).toBe('Usage: random-bunny [options]');
    });
});

describe('subreddit', () => {
    test('GIVEN -s is not supplied, EXPECT subreddit to be defaulted', async () => {
        const result = await cli([], '.');

        const subreddit = result.stdout.split('\n')
            .find(x => x && x.length > 0 && x.split(' = ')[0] == 'Subreddit')!
            .split(' = ')[1];

        expect(subreddit).toBe('Rabbits');
    }, 5000);

    test('GIVEN -s is supplied, EXPECT subreddit to be changed', async () => {
        const result = await cli(['-s', 'pics'], '.');

        const subreddit = result.stdout.split('\n')
            .find(x => x && x.length > 0 && x.split(' = ')[0] == 'Subreddit')!
            .split(' = ')[1];

        expect(subreddit).toBe('pics');
    }, 5000);

    test('GIVEN --subreddit is supplied, EXPECT subreddit to be changed', async () => {
        const result = await cli(['--subreddit', 'pics'], '.');

        const subreddit = result.stdout.split('\n')
            .find(x => x && x.length > 0 && x.split(' = ')[0] == 'Subreddit')!
            .split(' = ')[1];

        expect(subreddit).toBe('pics');
    }, 5000);
});

describe('json', () => {
    test('GIVEN -j is supplied, EXPECT output to be valid JSON', async () => {
        const result = await cli(['-j'], '.');

        const json = JSON.parse(result.stdout);

        expect(json).toBeDefined();
    }, 5000);

    test('GIVEN --json is supplied, EXPECT output to be valid JSON', async () => {
        const result = await cli(['--json'], '.');

        const json = JSON.parse(result.stdout);

        expect(json).toBeDefined();
    }, 5000);
});

describe('sort', () => {
    test('GIVEN --sort is not supplied, EXPECT sort to be defaulted', async () => {
        const result = await cli(['-q'], '.');

        const sortBy = result.stdout.split('\n')
            .find(x => x && x.length > 0 && x.split(' = ')[0] == 'Query.Sort By')!
            .split(' = ')[1];

        expect(sortBy).toBe('hot');
    }, 5000);

    test('GIVEN --sort is supplied WITH a valid input, EXPECT sort to be used', async () => {
        const result = await cli(['-q', '--sort', 'new'], '.');

        const sortBy = result.stdout.split('\n')
            .find(x => x && x.length > 0 && x.split(' = ')[0] == 'Query.Sort By')!
            .split(' = ')[1];

        expect(sortBy).toBe('new');
    }, 5000);

    test('GIVEN --sort is supplied WITH an invalid input, EXPECT error', async () => {
        const result = await cli(['-q', '--sort', 'invalid'], '.');

        expect(result.code).toBe(1);
        expect(result.stderr).toBe("error: option '--sort <sort>' argument 'invalid' is invalid. Allowed choices are hot, new, top.\n");
    }, 5000);
});

describe('query-metadata', () => {
    test('GIVEN --query-metadata is not supplied, EXPECT no query metadata returned', async () => {
        const result = await cli([], '.');

        const query = result.stdout.split('\n')
            .find(x => x && x.length > 0 && x.split(' = ')[0].startsWith('Query'));

        expect(query).toBeUndefined();
    }, 5000);

    test('GIVEN --query-metadata is supplied, EXPECT query metadata returned', async () => {
        const result = await cli(['--query-metadata'], '.');

        const query = result.stdout.split('\n')
            .find(x => x && x.length > 0 && x.split(' = ')[0].startsWith('Query'));

        expect(query).toBeDefined();
    }, 5000);

    test('GIVEN -q is supplied, EXPECT query metadata returned', async () => {
        const result = await cli(['-q'], '.');

        const query = result.stdout.split('\n')
            .find(x => x && x.length > 0 && x.split(' = ')[0].startsWith('Query'));

        expect(query).toBeDefined();
    }, 5000);
});

function cli(args: string[], cwd: string): Promise<cliResult> {
    return new Promise(resolve => {
        exec(`node ${path.resolve('./dist/cli.js')} ${args.join(' ')}`,
            { cwd },
            (error, stdout, stderr) => { resolve({
                code: error && error.code ? error.code : 0,
                error,
                stdout,
                stderr });
            });
    });
}

interface cliResult {
    code: number,
    error: any,
    stdout: string,
    stderr: string,
}
@@ -1,29 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`GenerateOutput EXPECT standout output to be returned 1`] = `
"Archived = false
Downvotes = 0
Hidden = false
Permalink = /r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/
Subreddit = Rabbits
Subreddit Subscribers = 654751
Title = This is my Ms Bear!
Upvotes = 17
Url = https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d"
`;

exports[`GenerateOutput GIVEN options.json is true, EXPECT output to be returned as JSON 1`] = `"{"Archived":false,"Downs":0,"Hidden":false,"Permalink":"/r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/","Subreddit":"Rabbits","SubredditSubscribers":654751,"Title":"This is my Ms Bear!","Ups":17,"Url":"https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d"}"`;

exports[`GenerateOutput GIVEN options.queryMetadata is supplied, EXPECT query metadata to be added 1`] = `
"Archived = false
Downvotes = 0
Hidden = false
Permalink = /r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/
Subreddit = Rabbits
Subreddit Subscribers = 654751
Title = This is my Ms Bear!
Upvotes = 17
Url = https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d
Query.Subreddit = rabbits
Query.Sort By = hot"
`;
@@ -1,118 +0,0 @@
import fs from "fs";
import CliHelper from "../../src/helpers/cliHelper";
import ICliOptions from "../../src/contracts/ICliOptions";
import IReturnResult from "../../src/contracts/IReturnResult";
import OutputHelper from "../../src/helpers/outputHelper";
import { ErrorCode } from "../../src/constants/ErrorCode";

describe("Endpoint", () => {
    describe("GIVEN response is successful", () => {
        test("GIVEN options.o is defined, EXPECT output written to file", () => {
            // Arrange
            const response = {
                IsSuccess: true,
            } as IReturnResult;

            const options = {
                o: "file.txt",
            } as ICliOptions;

            OutputHelper.GenerateOutput = jest.fn().mockReturnValue("test output");

            fs.writeFileSync = jest.fn();

            console.log = jest.fn();

            console.error = jest.fn();

            // Act
            const result = CliHelper.Endpoint(response, options);

            // Assert
            expect(result).toBe(0);

            expect(OutputHelper.GenerateOutput).toHaveBeenCalledTimes(1);
            expect(OutputHelper.GenerateOutput).toHaveBeenCalledWith(response, options);

            expect(fs.writeFileSync).toHaveBeenCalledTimes(1);
            expect(fs.writeFileSync).toHaveBeenCalledWith("file.txt", "test output");

            expect(console.log).not.toHaveBeenCalled();

            expect(console.error).not.toHaveBeenCalled();
        });

        test("GIVEN options.o is undefined, EXPECT output logged to console", () => {
            // Arrange
            const response = {
                IsSuccess: true,
            } as IReturnResult;

            const options = {
                o: undefined,
            } as ICliOptions;

            OutputHelper.GenerateOutput = jest.fn().mockReturnValue("test output");

            fs.writeFileSync = jest.fn();

            console.log = jest.fn();

            console.error = jest.fn();

            // Act
            const result = CliHelper.Endpoint(response, options);

            // Assert
            expect(result).toBe(0);

            expect(OutputHelper.GenerateOutput).toHaveBeenCalledTimes(1);
            expect(OutputHelper.GenerateOutput).toHaveBeenCalledWith(response, options);

            expect(fs.writeFileSync).not.toHaveBeenCalled();

            expect(console.log).toHaveBeenCalledTimes(1);
            expect(console.log).toHaveBeenCalledWith("test output");

            expect(console.error).not.toHaveBeenCalled();
        });
    });

    test("GIVEN response is failure, EXPECT error logged to console", () => {
        // Arrange
        const response = {
            IsSuccess: false,
            Error: {
                Message: "error message",
                Code: ErrorCode.FailedToFetchReddit,
            },
        } as IReturnResult;

        const options = {
            o: "file.txt",
        } as ICliOptions;

        OutputHelper.GenerateOutput = jest.fn().mockReturnValue("test output");

        fs.writeFileSync = jest.fn();

        console.log = jest.fn();

        console.error = jest.fn();

        // Act
        const result = CliHelper.Endpoint(response, options);

        // Assert
        expect(result).toBe(1);

        expect(OutputHelper.GenerateOutput).not.toHaveBeenCalled();

        expect(fs.writeFileSync).not.toHaveBeenCalled();

        expect(console.log).not.toHaveBeenCalled();

        expect(console.error).toHaveBeenCalledTimes(1);
        expect(console.error).toHaveBeenCalledWith("error message", ErrorCode.FailedToFetchReddit);
    });
});
@@ -1,99 +0,0 @@
import ICliOptions from "../../src/contracts/ICliOptions";
import IReturnResult from "../../src/contracts/IReturnResult";
import OutputHelper from "../../src/helpers/outputHelper";

describe("GenerateOutput", () => {
    test("EXPECT standout output to be returned", () => {
        // Arrange
        const response = {
            IsSuccess: true,
            Query: {
                subreddit: "rabbits",
                sortBy: "hot",
            },
            Result: {
                Archived: false,
                Downs: 0,
                Hidden: false,
                Permalink: "/r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/",
                Subreddit: "Rabbits",
                SubredditSubscribers: 654751,
                Title: "This is my Ms Bear!",
                Ups: 17,
                Url: "https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d",
            },
        } as IReturnResult;

        const options = {} as ICliOptions;

        // Act
        const result = OutputHelper.GenerateOutput(response, options);

        // Assert
        expect(result).toMatchSnapshot();
    });

    test("GIVEN options.json is true, EXPECT output to be returned as JSON", () => {
        // Arrange
        const response = {
            IsSuccess: true,
            Query: {
                subreddit: "rabbits",
                sortBy: "hot",
            },
            Result: {
                Archived: false,
                Downs: 0,
                Hidden: false,
                Permalink: "/r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/",
                Subreddit: "Rabbits",
                SubredditSubscribers: 654751,
                Title: "This is my Ms Bear!",
                Ups: 17,
                Url: "https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d",
            },
        } as IReturnResult;

        const options = {
            json: true,
        } as ICliOptions;

        // Act
        const result = OutputHelper.GenerateOutput(response, options);

        // Assert
        expect(result).toMatchSnapshot();
    });

    test("GIVEN options.queryMetadata is supplied, EXPECT query metadata to be added", () => {
        // Arrange
        const response = {
            IsSuccess: true,
            Query: {
                subreddit: "rabbits",
                sortBy: "hot",
            },
            Result: {
                Archived: false,
                Downs: 0,
                Hidden: false,
                Permalink: "/r/Rabbits/comments/1dj8pbt/this_is_my_ms_bear/",
                Subreddit: "Rabbits",
                SubredditSubscribers: 654751,
                Title: "This is my Ms Bear!",
                Ups: 17,
                Url: "https://preview.redd.it/d5yno653zf7d1.jpg?width=640&crop=smart&auto=webp&s=5064d1caec3c12ac2855eb57ff131d0b313d5e9d",
            },
        } as IReturnResult;

        const options = {
            queryMetadata: true,
        } as ICliOptions;

        // Act
        const result = OutputHelper.GenerateOutput(response, options);

        // Assert
        expect(result).toMatchSnapshot();
    });
});
@@ -1,4 +1,4 @@
import ImageHelper from "../../src/helpers/imageHelper";
import ImageHelper from "../src/imageHelper";
import fetch from "got-cjs";

jest.mock('got-cjs');
@@ -1,6 +1,6 @@
import { ErrorCode } from "../src/constants/ErrorCode";
import ErrorMessages from "../src/constants/ErrorMessages";
import ImageHelper from "../src/helpers/imageHelper";
import ImageHelper from "../src/imageHelper";
import randomBunny from "../src/index";
import fetch from "got-cjs";

@@ -37,7 +37,7 @@ describe('randomBunny', () => {
        expect(result.Result).toBeDefined();
        expect(result.Error).toBeUndefined();

        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
    });

    test('GIVEN sortBy is NOT supplied, expect it to default to hot', async () => {

@@ -69,7 +69,39 @@ describe('randomBunny', () => {
        expect(result.Result).toBeDefined();
        expect(result.Error).toBeUndefined();

        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/hot.json?limit=100');
        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/hot.json?limit=100');
    });

    test('GIVEN sortBy is NOT valid, expect it to default to hot', async () => {
        fetchMock.mockResolvedValue({
            body: JSON.stringify({
                data: {
                    children: [
                        {
                            data: {
                                archived: false,
                                downs: 0,
                                hidden: false,
                                permalink: '/r/Rabbits/comments/12pa5te/someone_told_pickles_its_monday_internal_fury/',
                                subreddit: 'Rabbits',
                                subreddit_subscribers: 298713,
                                title: 'Someone told pickles it’s Monday… *internal fury*',
                                ups: 1208,
                                url: 'https://i.redd.it/cr8xudsnkgua1.jpg',
                            },
                        },
                    ],
                }
            }),
        });

        const result = await randomBunny('rabbits', 'invalid');

        expect(result.IsSuccess).toBeTruthy();
        expect(result.Result).toBeDefined();
        expect(result.Error).toBeUndefined();

        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/hot.json?limit=100');
    });

    test('GIVEN the fetch fails, EXPECT failure result', async () => {

@@ -84,7 +116,7 @@ describe('randomBunny', () => {
        expect(result.Error!.Code).toBe(ErrorCode.FailedToFetchReddit);
        expect(result.Error!.Message).toBe(ErrorMessages.FailedToFetchReddit);

        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
    });

    test('GIVEN the result is NOT valid JSON, EXPECT failure result', async () => {

@@ -101,7 +133,7 @@ describe('randomBunny', () => {
        expect(result.Error!.Code).toBe(ErrorCode.UnableToParseJSON);
        expect(result.Error!.Message).toBe(ErrorMessages.UnableToParseJSON);

        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
    });

    test('GIVEN randomSelect does NOT find a response, EXPECT failure result', async () => {

@@ -157,8 +189,8 @@ describe('randomBunny', () => {
        expect(result.Error!.Code).toBe(ErrorCode.NoImageResultsFound);
        expect(result.Error!.Message).toBe(ErrorMessages.NoImageResultsFound);

        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
    });

    test("GIVEN data fetched is a gallery AND an image is returned from the helper, EXPECT this to be used", async () => {

@@ -191,7 +223,7 @@ describe('randomBunny', () => {
        expect(result.IsSuccess).toBeTruthy();
        expect(result.Result).toBeDefined();

        expect(fetchMock).toHaveBeenCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');
        expect(fetchMock).toBeCalledWith('https://reddit.com/r/rabbits/new.json?limit=100');

        expect(ImageHelper.FetchImageFromRedditGallery).toHaveBeenCalledTimes(1);
        expect(ImageHelper.FetchImageFromRedditGallery).toHaveBeenCalledWith("https://i.redd.it/gallery/cr8xudsnkgua1");