11 Commits

Author SHA1 Message Date
Dave Gallant
258ca59bdf Bump to 0.6.0 (#82) 2020-08-16 19:34:09 -04:00
Dave Gallant
e433a954cd Update README.md (#81) 2020-08-16 16:36:33 -04:00
Dave Gallant
b30a3c5b66 Set default of threads command to 1 page and 40 threads (#80)
* Set default of threads command to 1 page and 40 threads

* Echo posts in a pager
2020-08-16 15:07:48 -04:00
Dave Gallant
cfd7e01e43 Add integration tests (#79)
Adds integration tests in pytest to detect breaking changes.
2020-08-15 20:55:10 -04:00
Dave Gallant
f664cbd9c6 Add download count to README.md 2020-08-03 19:13:46 -04:00
Dave Gallant
ee6939aafe feat: change output to echo via pager 2020-08-03 15:48:39 -04:00
Dave Gallant
ad4a072325 Simplify sort-by 'total_views' -> 'views' (#76) 2020-08-02 21:42:53 -04:00
Dave Gallant
20089bc699 Add --sort-by flag to threads sub-command (#75)
* Add --sort-by flag to threads sub-command

* Add some sanity test commands to tox
2020-08-02 19:30:04 -04:00
Dave Gallant
83d583d2b0 Make a display thread function for re-use (#74) 2020-07-15 21:48:58 -04:00
Dave Gallant
2c65d29262 Add python3.8 and python3.9-dev to travis-ci (#73)
* Add python38 to travis

* Add python39-dev
2020-07-04 17:29:17 -04:00
Dave Gallant
a5d1bb197d Create codeql-analysis.yml (#71)
* Create codeql-analysis.yml

* Update .github/PULL_REQUEST_TEMPLATE.md
2020-07-01 18:54:52 -04:00
21 changed files with 306 additions and 211 deletions

View File

@@ -1,9 +1 @@
**What this PR does / why we need it:**
-
**Which issue(s) this PR fixes:**
-
**Additional Notes:**
-
###### Motivation for this change

27
.github/workflows/codeql-analysis.yml vendored Normal file
View File

@@ -0,0 +1,27 @@
# GitHub Actions workflow: CodeQL static analysis.
# Runs on every push and pull request, plus a weekly scheduled scan.
name: "Code scanning - action"

on:
  push:
  pull_request:
  schedule:
    # Every Monday at 04:00 UTC.
    - cron: '0 4 * * 1'

jobs:
  CodeQL-Build:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2
        with:
          # Fetch two commits so the PR head's parent is available for the
          # checkout step below.
          fetch-depth: 2

      # On pull requests, analyze the PR head commit rather than the
      # auto-generated merge commit.
      - run: git checkout HEAD^2
        if: ${{ github.event_name == 'pull_request' }}

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.3.0
rev: v2.5.0
hooks:
- id: check-added-large-files
- id: check-ast

View File

@@ -1,5 +1,5 @@
language: python
dist: xenial
dist: bionic
sudo: false
cache: false
stages:
@@ -18,15 +18,14 @@ script:
jobs:
include:
- python: "2.7"
- python: "3.5"
- python: "3.6"
- python: "3.7"
- python: "3.8"
- python: "3.9-dev"
- stage: deploy
python: "3.7"
python: "3.8"
deploy:
on:
repo: davegallant/rfd

View File

@@ -32,15 +32,10 @@ lint:
.PHONY: lint
## test: Run all unit tests
test: tmp/.tests-passed.sentinel
test:
> pytest -vvv tests
.PHONY: test
# Tests - re-ran if any file under src has been changed since tmp/.tests-passed.sentinel was last touched
tmp/.tests-passed.sentinel: $(shell find ${SRC} -type f)
> mkdir -p $(@D)
> pytest -v
> touch $@
## pr: Run pre-commit, lint and test
pr: precommit lint test
.PHONY: pr

View File

@@ -5,13 +5,10 @@ Hot deals on the command line.
[![Build Status](https://travis-ci.org/davegallant/rfd.svg?branch=master)](https://travis-ci.org/davegallant/rfd)
[![PyPI version](https://badge.fury.io/py/rfd.svg)](https://badge.fury.io/py/rfd)
[![Dependabot](https://badgen.net/badge/Dependabot/enabled/green?icon=dependabot)](https://dependabot.com/)
[![Total alerts](https://img.shields.io/lgtm/alerts/g/davegallant/rfd.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/davegallant/rfd/alerts/)
[![Language grade: Python](https://img.shields.io/lgtm/grade/python/g/davegallant/rfd.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/davegallant/rfd/context:python)
[![Downloads](https://pepy.tech/badge/rfd)](https://pepy.tech/project/rfd)
![screenshot](https://user-images.githubusercontent.com/4519234/85969861-e10a4100-b996-11ea-9a31-6203322c60ee.png)
## Install
```bash
@@ -37,29 +34,60 @@ Commands:
## Examples
### view hot deals
```shell
rfd threads
All commands open up in a pager.
Search can be done using `/`.
Close pager with `q`.
### View Hot Deals
```console
$ rfd threads
```
### search for pizza
```shell
rfd search 'pizza'
### View and Sort Hot Deals
```console
$ rfd threads --sort-by score
```
## Tab Completion
```console
$ rfd threads --sort-by views --pages 10
```
To enable:
### Simple Search
```console
$ rfd search 'pizza'
```
### Advanced Search
Regular expressions can be used for search.
```console
$ rfd search '(coffee|starbucks)' --pages 10 --sort-by views
```
### View Posts
```console
$ rfd posts https://forums.redflagdeals.com/kobo-vs-kindle-2396227/
```
## Shell Completion
Completion can be enabled if using `bash` or `zsh`.
### bash
```bash
echo 'eval "$(_RFD_COMPLETE=source rfd)"' >> ~/.profile
```console
$ echo 'eval "$(_RFD_COMPLETE=source rfd)"' >> ~/.profile
```
### zsh
```zsh
echo 'eval "$(_RFD_COMPLETE=source_zsh rfd)"' >> ~/.zshrc
```console
$ echo 'eval "$(_RFD_COMPLETE=source_zsh rfd)"' >> ~/.zshrc
```

View File

@@ -1 +1 @@
0.3.5
0.6.0

View File

@@ -6,3 +6,5 @@ __title__ = "RFD CLI"
__author__ = "Dave Gallant"
__license__ = "Apache 2.0"
__copyright__ = "(c) 2018 Dave Gallant"
API_BASE_URL = "https://forums.redflagdeals.com"

View File

@@ -6,11 +6,10 @@ except ImportError:
JSONDecodeError = ValueError
import logging
import requests
from .constants import API_BASE_URL
from .format import strip_html, is_valid_url
from .models import Post
from . import API_BASE_URL
from .posts import Post
from .scores import calculate_score
from .utils import is_int
from .utils import is_int, strip_html, is_valid_url
def extract_post_id(url):
@@ -34,28 +33,30 @@ def create_user_map(users):
return m
def get_threads(forum_id, limit, page=1):
def get_threads(forum_id, pages):
"""Get threads from rfd api
Arguments:
forum_id {int} -- forum id
limit {[type]} -- limit number of threads returned
pages {int} -- the number of pages of threads to collect
Returns:
dict -- api response
"""
threads = []
try:
response = requests.get(
"{}/api/topics?forum_id={}&per_page={}&page={}".format(
API_BASE_URL, forum_id, get_safe_per_page(limit), page
for page in range(1, pages + 1):
response = requests.get(
"{}/api/topics?forum_id={}&per_page=40&page={}".format(
API_BASE_URL, forum_id, page
)
)
)
if response.status_code == 200:
return response.json()
logging.error("Unable to retrieve threads. %s", response.text)
if response.status_code != 200:
raise Exception("When collecting threads, received a status code: %s" % response.status_code)
threads += response.json().get("topics")
except JSONDecodeError as err:
logging.error("Unable to retrieve threads. %s", err)
return None
logging.error("Unable to decode threads. %s", err)
return threads
def get_posts(post):

View File

@@ -2,17 +2,15 @@ from __future__ import unicode_literals
import logging
import os
import sys
import click
from colorama import init, Fore, Style
from colorama import init
from .api import get_threads, get_posts
from .search import search_threads
from .parsing import parse_threads
from .threads import parse_threads, search_threads, sort_threads, generate_thread_output
from .posts import generate_posts_output
from .__version__ import version as current_version
init()
print()
logging.getLogger()
logging.getLogger().setLevel(logging.INFO)
@@ -20,26 +18,12 @@ logging.getLogger().addHandler(logging.StreamHandler())
def get_version():
return "rfd " + current_version
def get_terminal_width():
_, columns = os.popen("stty size", "r").read().split()
return int(columns)
def get_vote_color(score):
if score > 0:
return Fore.GREEN + " [+" + str(score) + "] "
if score < 0:
return Fore.RED + " [" + str(score) + "] "
return Fore.BLUE + " [" + str(score) + "] "
return "rfd v" + current_version
def print_version(ctx, value):
if not value or ctx.resilient_parsing:
return
click.echo(get_version())
click.echo(get_version(), nl=False)
ctx.exit()
@@ -73,18 +57,7 @@ def posts(post_id):
"""
try:
click.echo("-" * get_terminal_width())
for post in get_posts(post=post_id):
click.echo(
" -"
+ get_vote_color(post.score)
+ Fore.RESET
+ post.body
+ Fore.YELLOW
+ " ({})".format(post.user)
)
click.echo(Style.RESET_ALL)
click.echo("-" * get_terminal_width())
click.echo_via_pager(generate_posts_output(get_posts(post=post_id)))
except ValueError:
click.echo("Invalid post id.")
sys.exit(1)
@@ -94,9 +67,10 @@ def posts(post_id):
@cli.command(short_help="Displays threads in the forum. Defaults to hot deals.")
@click.option("--limit", default=10, help="Number of topics.")
@click.option("--forum-id", default=9, help="The forum id number")
def threads(limit, forum_id):
@click.option("--pages", default=1, help="Number of pages to show. Defaults to 1.")
@click.option("--sort-by", default=None, help="Sort threads by")
def threads(forum_id, pages, sort_by):
"""Display threads in the specified forum id. Defaults to 9 (hot deals).
Popular forum ids:
@@ -113,30 +87,18 @@ def threads(limit, forum_id):
74 \t shopping discussion
88 \t cell phones
"""
_threads = parse_threads(get_threads(forum_id, limit), limit)
for count, thread in enumerate(_threads, 1):
click.echo(
" "
+ str(count)
+ "."
+ get_vote_color(thread.score)
+ Fore.RESET
+ "[%s] %s" % (thread.dealer_name, thread.title)
+ Fore.LIGHTYELLOW_EX
+ " (%d views)" % thread.total_views
+ Fore.RESET
)
click.echo(Fore.BLUE + " {}".format(thread.url))
click.echo(Style.RESET_ALL)
_threads = sort_threads(parse_threads(get_threads(forum_id, pages)), sort_by=sort_by)
click.echo_via_pager(generate_thread_output(_threads))
@cli.command(short_help="Search deals based on a regular expression.")
@click.option("--num-pages", default=5, help="Number of pages to search.")
@click.option("--pages", default=5, help="Number of pages to search.")
@click.option(
"--forum-id", default=9, help="The forum id number. Defaults to 9 (hot deals)."
)
@click.option("--sort-by", default=None, help="Sort threads by")
@click.argument("regex")
def search(num_pages, forum_id, regex):
def search(pages, forum_id, sort_by, regex):
"""Search deals based on regex.
Popular forum ids:
@@ -154,18 +116,11 @@ def search(num_pages, forum_id, regex):
88 \t cell phones
"""
count = 0
for page in range(1, num_pages):
_threads = parse_threads(get_threads(forum_id, 100, page=page), limit=100)
for thread in search_threads(threads=_threads, regex=regex):
count += 1
click.echo(
" "
+ str(count)
+ "."
+ get_vote_color(thread.score)
+ Fore.RESET
+ "[%s] %s" % (thread.dealer_name, thread.title)
)
click.echo(Fore.BLUE + " {}".format(thread.url))
click.echo(Style.RESET_ALL)
matched_threads = []
_threads = parse_threads(get_threads(forum_id, pages=pages))
for thread in search_threads(threads=_threads, regex=regex):
matched_threads.append(thread)
click.echo_via_pager(
generate_thread_output(sort_threads(matched_threads, sort_by=sort_by))
)

View File

@@ -1 +0,0 @@
API_BASE_URL = "https://forums.redflagdeals.com"

View File

@@ -1,16 +0,0 @@
"""Formatting utils"""
try:
from urllib.parse import urlparse # python 3
except ImportError:
from urlparse import urlparse # python 2
from bs4 import BeautifulSoup
def strip_html(text):
return BeautifulSoup(text, "html.parser").get_text()
def is_valid_url(url):
result = urlparse(url)
return all([result.scheme, result.netloc, result.path])

View File

@@ -1,18 +0,0 @@
# pylint: disable=old-style-class
class Thread:
def __init__(self, title, dealer_name, score, url, total_views):
self.dealer_name = dealer_name
self.score = score
self.title = title
self.url = url
self.total_views = total_views
def __repr__(self):
return "Thread(%s)" % self.title
class Post:
def __init__(self, body, score, user):
self.body = body
self.score = score
self.user = user

View File

@@ -1,35 +0,0 @@
from .constants import API_BASE_URL
from .scores import calculate_score
from .models import Thread
def build_web_path(slug):
return "{}{}".format(API_BASE_URL, slug)
def parse_threads(threads, limit):
"""parse topics list api response into digestible list.
Arguments:
threads {dict} -- topics response from rfd api
limit {int} -- limit number of threads returned
Returns:
list(dict) -- digestible list of threads
"""
parsed_threads = []
if threads is None:
return []
for count, topic in enumerate(threads.get("topics"), start=1):
if count > limit:
break
parsed_threads.append(
Thread(
title=topic.get("title"),
dealer_name=topic["offer"].get("dealer_name"),
score=calculate_score(topic),
url=build_web_path(topic.get("web_path")),
total_views=topic.get("total_views"),
)
)
return parsed_threads

32
rfd/posts.py Normal file
View File

@@ -0,0 +1,32 @@
# pylint: disable=old-style-class
import os
from colorama import Fore, Style
from .scores import get_vote_color
class Post:
    """A single forum post: its body text, vote score, and author name."""

    def __init__(self, body, score, user):
        self.body = body    # post body text
        self.score = score  # net vote score (may be 0)
        self.user = user    # author's display name

    def __repr__(self):
        # Mirrors Thread.__repr__ ("Thread(%s)") for consistent debug output.
        return "Post(%s)" % self.user
def get_terminal_width():
    """Return the terminal width in columns.

    Prefers shutil.get_terminal_size(), which respects the COLUMNS
    environment variable and falls back to 80 — and, unlike shelling out
    to `stty size`, never raises when stdin is not a TTY (piped output,
    cron, CI). Falls back to `stty` only on Python 2, which lacks
    shutil.get_terminal_size.

    Returns:
        int -- terminal width in columns
    """
    import shutil

    try:
        return shutil.get_terminal_size().columns
    except AttributeError:  # Python 2: shutil.get_terminal_size missing
        _, columns = os.popen("stty size", "r").read().split()
        return int(columns)
def generate_posts_output(posts):
    """Build the full pager text for an iterable of posts.

    Each post renders as one colorized line — vote score, body, author in
    yellow — followed by a full-terminal-width separator line.

    Arguments:
        posts -- iterable of Post objects

    Returns:
        str -- text suitable for click.echo_via_pager
    """
    # Hoisted: the width does not change between posts.
    width = get_terminal_width()
    # Fix: terminate the leading separator with a newline; previously the
    # first post was concatenated directly onto the dashes line.
    output = "-" * width + "\n"
    for post in posts:
        output += (
            " -"
            + get_vote_color(post.score)
            + Fore.RESET
            + post.body
            + Fore.YELLOW
            + " ({})".format(post.user)
        )
        output += Style.RESET_ALL
        output += "\n"
        output += "-" * width
        output += "\n"
    return output

View File

@@ -1,3 +1,6 @@
from colorama import Fore
def calculate_score(post):
"""Calculate either topic or post score. If votes cannot be retrieved, the score is 0.
@@ -16,3 +19,11 @@ def calculate_score(post):
pass
return score
def get_vote_color(score):
    """Return a colorized " [score] " tag.

    Positive scores are green with an explicit plus sign, negative scores
    are red, and zero is blue.
    """
    if score > 0:
        color, shown = Fore.GREEN, "+" + str(score)
    elif score < 0:
        color, shown = Fore.RED, str(score)
    else:
        color, shown = Fore.BLUE, str(score)
    return color + " [" + shown + "] "

View File

@@ -1,14 +0,0 @@
import re
def search_threads(threads, regex):
"""Match deal title and dealer names with regex specified."""
regexp = re.compile(str(regex).lower())
for deal in threads:
if regexp.search(deal.title.lower()) or (
deal.dealer_name and regexp.search(deal.dealer_name.lower())
):
yield deal

99
rfd/threads.py Normal file
View File

@@ -0,0 +1,99 @@
import re
from colorama import Fore, Style
from . import API_BASE_URL
from .scores import calculate_score, get_vote_color
# pylint: disable=old-style-class
class Thread:
    """A forum thread and the metadata needed to display it."""

    def __init__(self, title, dealer_name, score, url, views):
        # Stored in the constructor's declaration order for readability.
        self.title = title
        self.dealer_name = dealer_name
        self.score = score
        self.url = url
        self.views = views

    def __repr__(self):
        return "Thread(%s)" % self.title
def build_web_path(slug):
    """Return the absolute web URL for a forum-relative slug."""
    base = API_BASE_URL
    return "{0}{1}".format(base, slug)
def get_dealer(topic):
    """Return the dealer name for a topic, or None when it has no offer."""
    offer = topic.get("offer")
    if not offer:
        return None
    return offer.get("dealer_name")
def parse_threads(threads):
    """Parse a topics list from the rfd api into Thread objects.

    Arguments:
        threads {list} -- topics list from the rfd api response
                          (None is tolerated and yields an empty list)

    Returns:
        list(Thread) -- digestible list of threads
    """
    if threads is None:
        return []
    return [
        Thread(
            title=topic.get("title"),
            dealer_name=get_dealer(topic),
            score=calculate_score(topic),
            url=build_web_path(topic.get("web_path")),
            views=topic.get("total_views"),
        )
        for topic in threads
    ]
def sort_threads(threads, sort_by):
    """Sort threads descending by one of: views, score, title.

    A sort_by of None is a no-op: the input list is returned unchanged.
    """
    if sort_by is None:
        return threads
    assert sort_by in ("views", "score", "title")
    return sorted(threads, key=lambda thread: getattr(thread, sort_by), reverse=True)
def search_threads(threads, regex):
    """Yield threads whose title or dealer name matches the given regex.

    Matching is case-insensitive: both the pattern and the candidate
    strings are lowercased before searching.
    """
    pattern = re.compile(str(regex).lower())

    def _matches(deal):
        # Title takes precedence; dealer name is only consulted when set.
        if pattern.search(deal.title.lower()):
            return True
        return bool(deal.dealer_name) and bool(pattern.search(deal.dealer_name.lower()))

    for deal in threads:
        if _matches(deal):
            yield deal
def generate_thread_output(threads):
    """Yield one colorized, pager-ready chunk of text per thread.

    Each chunk is a numbered line — vote score, optional "[dealer] "
    prefix, title, and view count — followed by the thread URL on its
    own line and a blank separator line.

    Arguments:
        threads -- iterable of Thread objects
    """
    for count, thread in enumerate(threads, 1):
        # Simplified: `dealer and dealer is not None` double-checked None;
        # a single truthiness test covers both None and empty string.
        dealer = "[" + thread.dealer_name + "] " if thread.dealer_name else ""
        output = (
            " "
            + str(count)
            + "."
            + get_vote_color(thread.score)
            + Fore.RESET
            + "%s%s" % (dealer, thread.title)
            + Fore.LIGHTYELLOW_EX
            + " (%d views)" % thread.views
            + Fore.RESET
        )
        output += Fore.BLUE + " {}".format(thread.url)
        output += Style.RESET_ALL
        output += "\n\n"
        yield output

View File

@@ -1,4 +1,18 @@
"""This module provides utility functions that are used within rfd"""
try:
from urllib.parse import urlparse # python 3
except ImportError:
from urlparse import urlparse # python 2
from bs4 import BeautifulSoup
def strip_html(text):
return BeautifulSoup(text, "html.parser").get_text()
def is_valid_url(url):
result = urlparse(url)
return all([result.scheme, result.netloc, result.path])
def is_int(number):

View File

@@ -0,0 +1,25 @@
from subprocess import Popen, PIPE
import pytest
def run_cli(args):
    """Run the rfd CLI as a subprocess and return its captured stdout (bytes).

    Arguments:
        args {str} -- command-line arguments, split on whitespace

    Fails the calling test (via assert) if the process exits non-zero.
    """
    cmd = ["python", "-m", "rfd"] + args.split()
    p = Popen(cmd, stdout=PIPE)
    stdout, _ = p.communicate()
    assert p.returncode == 0
    return stdout
def test_version():
    # `--version` should print the "rfd v<version>" banner; compared as
    # bytes because Popen returns raw stdout.
    stdout = run_cli("--version")
    assert b"rfd v" in stdout
# Smoke test: the threads sub-command should exit 0, with and without sorting.
# NOTE(review): hits the live rfd API — requires network access.
@pytest.mark.parametrize("args", ["", "--sort-by score"])
def test_threads(args):
    run_cli("threads " + args)
# Smoke test: search should exit 0 for both a plain term and a regex query.
# NOTE(review): hits the live rfd API — requires network access.
@pytest.mark.parametrize("args", ["'pizza'", "'(coffee|starbucks)'"])
def test_search(args):
    run_cli("search " + args)

View File

@@ -1,5 +1,5 @@
from rfd.api import extract_post_id
from rfd.parsing import build_web_path, parse_threads
from rfd.threads import build_web_path, parse_threads
def test_build_web_path():
@@ -19,11 +19,10 @@ def test_extract_post_id():
def test_parse_threads(threads_api_response):
limit = 10
threads = parse_threads(threads_api_response, limit)
assert len(threads) == limit
threads = parse_threads(threads_api_response.get("topics"))
assert len(threads) == 10
def test_parse_threads_empty():
assert parse_threads(None, 10) == []
assert parse_threads(None) == []