markdown/github shit

Nemo 2019-11-17 09:28:29 +05:30
parent b5244edafc
commit d87961c35b
3 changed files with 209 additions and 0 deletions

ghissues2csv (Executable file, 84 additions)

@@ -0,0 +1,84 @@
#!/usr/bin/env python
# encoding=utf8
"""
Exports issues from a list of repositories to individual CSV files.
Uses OAuth authentication (a GitHub token) to retrieve issues
from repositories that the authenticated user has access to. Supports GitHub API v3.
Forked from: unbracketed/export_repo_issues_to_csv.py
Original Source: https://gist.github.com/kassyuz/a5eddb0a9bec83768b8db16e6d6b4263
"""
import argparse
import csv
import sys

import requests

# Python 2 hack: force UTF-8 so issue titles with non-ASCII characters can be written
reload(sys)
sys.setdefaultencoding('utf8')

state = 'open'


def write_issues(r, csvout):
    """Parses JSON response and writes to CSV."""
    if r.status_code != 200:
        raise Exception(r.status_code)
    for issue in r.json():
        # The issues endpoint also returns pull requests; skip those
        if 'pull_request' not in issue:
            labels = ', '.join([l['name'] for l in issue['labels']])
            date = issue['created_at'].split('T')[0]
            closed_date = ''
            if issue['state'] != 'open':
                closed_date = issue['closed_at'].split('T')[0]
            # Change the following line to write out additional fields
            csvout.writerow([labels, issue['user']['login'], issue['number'],
                             issue['title'], issue['state'], date, closed_date,
                             issue['html_url']])


def get_issues(name):
    """Requests issues from the GitHub API and writes them to a CSV file."""
    url = 'https://api.github.com/repos/{}/issues?state={}'.format(name, state)
    print(url)
    r = gh_session.get(url)
    csvfilename = '{}-issues.csv'.format(name.replace('/', '-'))
    with open(csvfilename, 'w') as csvfile:
        csvout = csv.writer(csvfile)
        csvout.writerow(['Labels', 'User-Git', 'Number', 'Title', 'State',
                         'Created Date', 'Closed Date', 'URL'])
        write_issues(r, csvout)

        # Multiple requests are required if the response is paginated; the Link
        # header maps rel names ("next", "last") to the corresponding page URLs.
        if 'link' in r.headers:
            pages = {rel[6:-1]: url[url.index('<') + 1:-1] for url, rel in
                     (link.split(';') for link in
                      r.headers['link'].split(','))}
            while 'last' in pages and 'next' in pages:
                pages = {rel[6:-1]: url[url.index('<') + 1:-1] for url, rel in
                         (link.split(';') for link in
                          r.headers['link'].split(','))}
                r = gh_session.get(pages['next'])
                write_issues(r, csvout)
                if pages['next'] == pages['last']:
                    break


parser = argparse.ArgumentParser(description="Write GitHub repository issues "
                                             "to CSV file.")
parser.add_argument('repositories', nargs='+', help="Repository names, "
                    "formatted as 'username/repo'")
parser.add_argument('--all', action='store_true', help="Returns both open "
                    "and closed issues.")
args = parser.parse_args()

if args.all:
    state = 'all'

# user = raw_input('Insert your Github user >> ')
token = raw_input('Insert your Github Token (read:org and repo permissions) >> ')
gh_session = requests.Session()
gh_session.auth = ('captn3m0', token)

for repository in args.repositories:
    get_issues(repository)
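Usage sketch, assuming the script is executable and using placeholder repository names; it prompts for a GitHub token and writes one <owner>-<repo>-issues.csv file per repository:

./ghissues2csv captn3m0/hacks captn3m0/dotfiles --all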

github-init-repo (Executable file, 12 additions)

@@ -0,0 +1,12 @@
#!/bin/bash
set -euo pipefail

ORG="$1"
REPO="$2"

mkdir "$REPO"
pushd "$REPO"
touch README.md
git init
git add README.md
git commit -m "first commit"
git remote add origin "https://github.com/$ORG/$REPO.git"
git push -u origin master
popd
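Usage sketch, with a placeholder organisation and repository name; the remote repository must already exist on GitHub for the final push to succeed:

./github-init-repo captn3m0 new-repo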

mdtable2csv (Executable file, 113 additions)

@@ -0,0 +1,113 @@
#!/usr/bin/env python
# coding:utf-8
# Created by tom_th_lin on 2015/9/18.
import errno
import io
import sys

import requests
from bs4 import BeautifulSoup
from flask import abort, json

# from md2html import _read_file_or_404
# from md2html import render_content


###
# define functions
###
def _read_file_or_404(filename, read_as_text=True):
    """
    Reads the contents of the specified file, or raises 404.
    """
    mode = 'rt' if read_as_text else 'rb'
    encoding = 'utf-8' if read_as_text else None
    try:
        with io.open(filename, mode, encoding=encoding) as f:
            return f.read()
    except IOError as ex:
        if ex.errno != errno.ENOENT:
            raise
        abort(404)


def render_content(text, api_url, gfm=False, context=None,
                   username=None, password=None):
    """
    Renders the specified markup using the GitHub API.
    """
    if gfm:
        url = '{}/markdown'.format(api_url)
        data = {'text': text, 'mode': 'gfm'}
        if context:
            data['context'] = context
        data = json.dumps(data, ensure_ascii=False).encode('utf-8')
        headers = {'content-type': 'application/json; charset=UTF-8'}
    else:
        url = '{}/markdown/raw'.format(api_url)
        data = text.encode('utf-8')
        headers = {'content-type': 'text/x-markdown; charset=UTF-8'}

    auth = (username, password) if username or password else None
    r = requests.post(url, headers=headers, data=data, auth=auth)

    # Relay HTTP errors
    if r.status_code != 200:
        try:
            message = r.json()['message']
        except Exception:
            message = r.text
        abort(r.status_code, message)

    return r.text


###
#
# Start program.
#
###
if len(sys.argv) >= 2:
    filename = sys.argv[1]
else:
    print("please pass the markdown filename as the first argument")
    sys.exit(1)

if not filename.endswith('.md'):
    print("the file format should be *.md")
    sys.exit(1)

render_text = _read_file_or_404(filename)
# print(render_text)

# Render the markdown through the GitHub API, then pull the first <table>
# out of the resulting HTML.
api_url = 'https://api.github.com'
html_table = render_content(render_text, api_url, True, None, None, None)
soup = BeautifulSoup(html_table, "html.parser")  # parse html
table = soup.table  # get <table>...</table>
# print(table)

# Write the table out as a naive CSV: cells are joined with commas, so cell
# contents containing commas are not escaped.
f = open(filename[:-3] + '.csv', 'w')  # open file.csv
rows = table.find_all('tr')  # get all rows <tr></tr>

ths = rows[0].find_all('th')
write_th_to_file = ''
for th in ths:
    if th.string is not None:
        write_th_to_file += (th.string + ',')
    else:
        write_th_to_file += (' ' + ',')
write_th_to_file = write_th_to_file[:-1]
f.write(write_th_to_file)
f.write('\n')

for row in rows[1:]:
    write_td_to_file = ''
    tds = row.find_all('td')
    for td in tds:
        if td.string is not None:
            write_td_to_file += (td.string + ',')
        else:
            write_td_to_file += (' ' + ',')
    write_td_to_file = write_td_to_file[:-1]
    # print(write_td_to_file)
    f.write(write_td_to_file)
    f.write('\n')

f.close()
print("conversion successfully done")