restructuring and cleaning up debt

Anish Lakhwara 2022-03-25 01:48:14 +10:00
parent ca6535d6fb
commit 3227771f7e
400 changed files with 204 additions and 183 deletions

View file

@@ -1,3 +1,5 @@
from . import mistune
from shared import TagHelper, config
from jinja2 import Environment, FileSystemLoader
import argparse
import glob
@@ -7,11 +9,6 @@ import platform
import re
import shutil
from datetime import datetime
# TODO flith
import sys
sys.path.append("./")
from shared import TagHelper, config
from . import mistune
# Default encoding
encoding = 'utf-8'
@@ -32,12 +29,15 @@ build_url = config.relative_build_url
if args.prod:
build_url = config.absolute_build_url
def generate_config():
pass
def generate_page(template_file=config.template_main, options=generate_config()):
pass
def generate_navigation(template_nav=config.template_nav):
file_loader = FileSystemLoader('')
env = Environment(loader=file_loader)
@@ -47,6 +47,7 @@ def generate_navigation(template_nav=config.template_nav):
output = template.render(index_urls=[f.split(".")[0] for f in index_tags])
return output
def add_file_path(pages, root_dir):
ret = set()
for page in pages:
@@ -64,18 +65,21 @@ def generate_index_pages(template=config.template_main, template_nav=config.temp
pages = add_file_path(pages, config.content_folder[0])
entries = create_entries(pages)
# TODO this broke as well, not sure what happened to entries, but the whole page is blank?
subPages = generate_sub_pages(entries, len(entries), config.content_folder[0], False)
subPages = generate_sub_pages(entries, len(
entries), config.content_folder[0], False)
# Build the files by filling out the template
file_loader = FileSystemLoader('')
env = Environment(loader=file_loader)
# regular index pages use the main template
template = env.get_template(template)
current_index_template = template.render(page_body=subPages, page_date=None, page_tags=None, page_navigation=generate_navigation(template_nav), page_title=index.capitalize() + ' index', config=config)
current_index_template = template.render(page_body=subPages, page_date=None, page_tags=None, page_navigation=generate_navigation(
template_nav), page_title=index.capitalize() + ' index', config=config)
slugfile = config.build_folder + index + '_index.html'
with open(slugfile, 'w', encoding=encoding) as fobj:
fobj.write(current_index_template)
generate_table_index()
def generate_table_index(template=config.template_index, template_nav=config.template_nav):
# build the metaIndex page
# This function creates the tag_index.html page
@@ -89,7 +93,8 @@ def generate_table_index(template=config.template_index, template_nav=config.tem
current_index_template = template.render(index_urls=parsed_set,
page_date=None,
page_tags=None,
page_navigation=generate_navigation(template_nav),
page_navigation=generate_navigation(
template_nav),
page_title='Piles',
config=config)
slugfile = config.build_folder + 'tag_index.html'
@@ -110,7 +115,8 @@ def generate_html_pages(site_folder, entries, template, sub_pages_list, template
template = env.get_template(template)
if len(entry['tags']) > 1:
entry['tags'] = entry['tags'].split(' ')
page_template = template.render(page_body=entry['pageContent'], page_date="<date>%s</date>"%entry['date'], page_tags=entry['tags'], name_of_site="Kitaab", page_navigation=nav_html, page_title=entry['title'], build_url=entry['parent_text'], config=config)
page_template = template.render(page_body=entry['pageContent'], page_date="<date>%s</date>" % entry['date'], page_tags=entry['tags'],
name_of_site="Kitaab", page_navigation=nav_html, page_title=entry['title'], build_url=entry['parent_text'], config=config)
# Checking if content folder exists
folderExists = os.path.exists(site_folder+entry['folder'])
@@ -137,9 +143,10 @@ def get_entry_title(page):
textContent = textContent[0]
textContent = textContent.replace('# ', '')
if '%title' in textContent:
textContent = textContent.replace('%title','')
textContent = textContent.replace('%title', '')
else:
textContent = page.split('/')[1][:-5] # remove .wiki from the end of the file name
# remove .wiki from the end of the file name
textContent = page.split('/')[1][:-5]
except FileNotFoundError:
#print(f"Page not found, but present in tags: {page}")
return None
@@ -172,6 +179,8 @@ def style_iframes(page):
return page
# Get a list of the tags in the page
def get_tags(page):
regex = r":[a-zA-Z:]*:"
matches = re.finditer(regex, page, re.MULTILINE)
@@ -209,6 +218,7 @@ def fix_amp(page):
print(f"{page} is not a string")
return ""
def fix_wiki_links(page, path):
regex = r"\[\[([\s\S]+?\|?[\s\S]+?)\]\]"
matches = re.finditer(regex, page, re.MULTILINE)
@@ -217,15 +227,22 @@ def fix_wiki_links(page, path):
captured_group = match.group(groupNum + 1)
link_elem = captured_group.split("|")
if len(link_elem) > 1:
full_url = "<a href='" + build_url + link_elem[0].strip() + ".html' >" + link_elem[1].strip() + "</a>"
full_url = "<a href='" + build_url + \
link_elem[0].strip() + ".html' >" + \
link_elem[1].strip() + "</a>"
else:
if config.flat_build:
full_url = "<a href='" + build_url + link_elem[0].strip() + ".html' >" + link_elem[0].strip() + "</a>"
full_url = "<a href='" + build_url + \
link_elem[0].strip() + ".html' >" + \
link_elem[0].strip() + "</a>"
else:
full_url = "<a href='" + build_url + path + "/" + link_elem[0].strip() + ".html' >" + link_elem[0].strip() + "</a>"
full_url = "<a href='" + build_url + path + "/" + \
link_elem[0].strip() + ".html' >" + \
link_elem[0].strip() + "</a>"
page = page.replace(match[0], full_url)
return page
def remove_tags(page):
try:
regex = r":[a-zA-Z:]*:"
@@ -235,6 +252,7 @@ def remove_tags(page):
print(f'Unexpected error in removing tags in {page}')
return page
def remove_title(page):
try:
textContent = page.splitlines()
@@ -247,6 +265,7 @@ def remove_title(page):
print(f'Unexpected error in removing title in {page}')
return page
def remove_date(page):
try:
regex = r"%date [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}"
@@ -277,12 +296,12 @@ def replacement_function(replace):
def replace_generated(page):
#try:
# try:
page = re.sub(r'@<[A-Za-z- ]*>',
replacement_function,
page)
return page
#except Exception as e:
# except Exception as e:
# print(e)
# print(f"Could not replace generated in {page}")
# return page
@@ -294,13 +313,14 @@ def get_date(page):
match = re.findall(regex, page)[0].strip('%date')
return match
except:
#print(f"Could not parse date")
# print(f"Could not parse date")
return None
def parse_headings(page):
try:
page = re.sub(r'= [\[\]a-zA-Z0-9:|+& ]* =',
lambda x: x.group().replace('=','###', 1).strip('='),
lambda x: x.group().replace('=', '###', 1).strip('='),
page)
return page
except Exception as e:
@@ -308,6 +328,8 @@ def parse_headings(page):
return page
# From the list of files, creates the main array of entries that will be processed later
def create_entries(pages):
fullContent = []
noTitle = []
@@ -376,8 +398,8 @@ def clean_path(path):
items = path_clean.split('/')
path_items = {
"slug" : None,
"date" : None,
"slug": None,
"date": None,
"folder": items[0],
"file": items[1]
}
@@ -394,24 +416,27 @@ def clean_path(path):
has_date = True
else:
pass
#print(f"{path} does not have a date")
# print(f"{path} does not have a date")
if has_date:
if match[0] != path_items["file"]:
path_items["date"] = match[0]
path_items["slug"] = path_items["file"].replace(match[0] + "-", "") + ".html"
path_items["slug"] = path_items["file"].replace(
match[0] + "-", "") + ".html"
else:
path_items["date"] = match[0]
path_items["slug"] = path_items["file"] + ".html"
# Converts the EU date to an ISO date to allow page sorting
if config.date_format == "EU":
path_items["iso_date"] = str(datetime.strptime(path_items["date"], '%y%m%d-%H%M'))
path_items["iso_date"] = str(
datetime.strptime(path_items["date"], '%y%m%d-%H%M'))
if config.date_format == "ISO":
path_items["iso_date"] = str(datetime.strptime(path_items["date"], '%Y-%m-%d'))
path_items["iso_date"] = str(
datetime.strptime(path_items["date"], '%Y-%m-%d'))
else:
path_items["slug"] = path_items["file"] + ".html"
#last_edit = str(subprocess.check_output('git log -1 --format="%ci" ' + path, shell=True)).replace("b'", "").replace("\\n'", '')
# last_edit = str(subprocess.check_output('git log -1 --format="%ci" ' + path, shell=True)).replace("b'", "").replace("\\n'", '')
# get last edited time from system
try:
last_edit = os.path.getmtime('/home/anish/'+path)
@@ -439,11 +464,12 @@ def clean_path(path):
else:
if config.flat_build:
path_items["parent_url"] = path_items["folder"] + ".html"
path_items["parent_text"] = path_items["folder"].replace("-", " ").capitalize()
path_items["parent_text"] = path_items["folder"].replace(
"-", " ").capitalize()
else:
path_items["parent_url"] = path_items["folder"]
path_items["parent_text"] = path_items["folder"].replace("-", " ").capitalize()
path_items["parent_text"] = path_items["folder"].replace(
"-", " ").capitalize()
return path_items
@@ -457,8 +483,10 @@ def generate_sub_pages(entries, num, folder, title, title_name=""):
# Create the list
file_loader = FileSystemLoader('')
env = Environment(loader=file_loader)
template = env.get_template(config.template_list) # TODO maybe not hardcode
sub_page_list = template.render(entries=selected_entries, title_name=title_name, title=title)
# TODO maybe not hardcode
template = env.get_template(config.template_list)
sub_page_list = template.render(
entries=selected_entries, title_name=title_name, title=title)
return sub_page_list
@@ -531,8 +559,9 @@ def generate_website():
# Get main html template
file_loader = FileSystemLoader('')
env = Environment(loader=file_loader)
home_page = env.get_template(config.template_main) # TODO maybe not hardcode
#home_page = create_home_page(template, config.build_folder)
home_page = env.get_template(
config.template_main) # TODO maybe not hardcode
# home_page = create_home_page(template, config.build_folder)
generate_index_pages(config.template_main, config.template_nav)
@@ -544,11 +573,12 @@ def generate_website():
sub_pages_list = generate_sub_pages(
entries, len(entries), folder, False)
# For each section, create a short listing of sub pages and add it to the home page
# For each section, create a short listing of sub pages
# and add it to the home page
home_pageSubList = generate_sub_pages(entries, 50, folder, False)
#home_page = home_page.replace('content_list', home_pageSubList +
# home_page = home_page.replace('content_list', home_pageSubList +
# "content_list")
#home_page = home_page.replace('page_date', "")
# home_page = home_page.replace('page_date', "")
generate_html_pages(config.build_folder, entries, config.template_main,
sub_pages_list, config.template_nav)
@@ -562,13 +592,14 @@ def generate_website():
# Once all sections have been processed, finish the home page
# Removes the "content_list" in the partial
#home_page = home_page.replace('content_list', "")
#home_page = home_page.replace('name_of_site', config.name_of_site)
#home_page = home_page.replace('site_meta_description',
# home_page = home_page.replace('content_list', "")
# home_page = home_page.replace('name_of_site', config.name_of_site)
# home_page = home_page.replace('site_meta_description',
# config.site_meta_description)
#home_page = home_page.replace(
# home_page = home_page.replace(
# 'twitter_name', config.twitter_name)
home_page = home_page.render(page_body=home_pageSubList, page_date=None, page_tags=None, page_navigation=generate_navigation(), page_title="Recent Files", config=config)
home_page = home_page.render(page_body=home_pageSubList, page_date=None, page_tags=None,
page_navigation=generate_navigation(), page_title="Recent Files", config=config)
slug_file = config.build_folder + "recents.html"
with open(slug_file, 'w', encoding=encoding) as fobj:
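
The wiki-link rewrite in `fix_wiki_links` above is the heart of the vimwiki-to-HTML conversion. A minimal standalone sketch of the same idea, covering only the flat-build case and using a hypothetical `build_url` value:

```python
import re

# Hypothetical base URL; in the generator it comes from
# config.relative_build_url or config.absolute_build_url.
build_url = ""

def fix_wiki_links_flat(page):
    # Matches [[target]] and [[target|label]] vimwiki links.
    regex = r"\[\[([\s\S]+?\|?[\s\S]+?)\]\]"
    for match in re.finditer(regex, page, re.MULTILINE):
        link_elem = match.group(1).split("|")
        target = link_elem[0].strip()
        label = link_elem[1].strip() if len(link_elem) > 1 else target
        full_url = "<a href='" + build_url + target + ".html' >" + label + "</a>"
        page = page.replace(match.group(0), full_url)
    return page

print(fix_wiki_links_flat("See [[recipes|my recipes]] and [[dev]]."))
# See <a href='recipes.html' >my recipes</a> and <a href='dev.html' >dev</a>.
```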

View file

@@ -6,15 +6,11 @@
# filter - leave names intact but do not link to relevant documents
# create - list in an index file for website
from shared.censor import privateList
from shared import TagHelper, config
from shared import TagHelper, config, helpers
from os import listdir, mkdir, path, remove
import shutil
import random
import json
from . import convert
import os
import sys
sys.path.append("../")
# TODO: generalize based on headings(?) in index.wiki
@@ -23,22 +19,7 @@ censor = privateList
filter = ["dream", "gratitude", "review", "jrnl", "work", "job", "debt",
"people", "priv", "threatdefence", "info"]
# TODO create = ...
def loadNetwork(path='network.json'):
# Loads the network.json file
with open(path, 'r') as f:
everything = json.load(f)
return everything
def setNetwork(everything):
# Everything is loaded from the network.json file
# keys = set(everything.network())
network = ""
values = set(everything.values())
everything = network + values
return everything
outputBasePath = config.raw_folder
def filterFiles(AllFileList):
@@ -53,43 +34,6 @@ def filterFiles(AllFileList):
return AllFileList - doNotTakeFiles
def untaggedFiles(AllFileList=TagHelper.getAllFiles()):
toFilter = filter + censor
tagList = TagHelper.getAllTags()
doNotTakeFiles = set()
for mask in toFilter:
try:
doNotTakeFiles.update(tagList[mask])
except KeyError:
print("Key not found: {}".format(mask))
return set.difference(AllFileList, doNotTakeFiles)
def censorNetwork(network):
# Takes network as a set
for c in censor:
if c in network:
network.add(generateBlocks(len(c) + random.randint(0, 5)))
network.remove(c)
return network
def generateBlocks(size=1):
# Generates a Unicode string of length size, formed from block characters
charList = ['▖', '▗', '▘', '▙', '▚', '▛', '▜',
'▝', '▞', '▟', '▀', '▄', '█', '▌']
ret = ""
while size > 0:
size = size - 1
rand = random.randint(0, 13)
ret = ret + charList[rand]
return ret
def createIndexNode(network):
pass
def copyFiles(fileList):
baseFilePath = '/home/anish/vimwiki/'
outputFilePath = '../raw/' # TODO relative path....
@@ -105,7 +49,7 @@ def copyFiles(fileList):
outputFilePath+f))
def censorFiles(outputBasePath='../raw/'):
def censorFiles(outputBasePath=outputBasePath):
for fileName in listdir(outputBasePath):
if not path.isfile(outputBasePath + fileName):
continue
@@ -114,14 +58,14 @@ def censorFiles(outputBasePath='../raw/'):
with open(fileName, 'r', errors='ignore') as f:
for line in f.readlines():
for c in censor:
line = line.replace(c, generateBlocks(len(c)))
line = line.replace(c, helpers.generateBlocks(len(c)))
newFile = newFile + line
with open(fileName, 'w') as f:
for line in newFile:
f.write(line)
def convertFiles(outputBasePath='../raw/'):
def convertFiles(outputBasePath=outputBasePath):
for filename in listdir(outputBasePath):
if not path.isfile(outputBasePath + filename):
continue
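
The `generateBlocks` helper deleted from this file now lives in the shared module and is called above as `helpers.generateBlocks`. A sketch of the moved function, reconstructed from the deleted body (the exact block-element characters are an assumption):

```python
import random

def generateBlocks(size=1):
    # Returns a string of `size` random block-element characters,
    # used to blot out censored words while keeping roughly their length.
    charList = ['▖', '▗', '▘', '▙', '▚', '▛', '▜',
                '▝', '▞', '▟', '▀', '▄', '█', '▌']
    ret = ""
    while size > 0:
        size = size - 1
        ret = ret + charList[random.randint(0, len(charList) - 1)]
    return ret

# Usage mirrors censorFiles: replace each censored word with blocks.
line = "met with alice today"
print(line.replace("alice", generateBlocks(len("alice"))))
```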

src/grab/network.py (new file, 35 lines)

@@ -0,0 +1,35 @@
import json
import random
from shared import helpers
from shared.censor import privateList
censor = privateList
def loadNetwork(path='network.json'):
# Loads the network.json file
with open(path, 'r') as f:
everything = json.load(f)
return everything
def setNetwork(everything):
# Everything is loaded from the network.json file
# keys = set(everything.network())
network = ""
values = set(everything.values())
everything = network + values
return everything
def censorNetwork(network):
# Takes network as a set
for c in censor:
if c in network:
network.add(helpers.generateBlocks(len(c) + random.randint(0, 5)))
network.remove(c)
return network
def createIndexNode(network):
pass
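
A brief usage sketch for the new module; the import path and the shape of `network.json` (a flat mapping of page names) are assumptions:

```python
# Assumes src/ is on sys.path so the package imports as grab.network.
from grab import network

everything = network.loadNetwork('network.json')  # dict parsed from JSON
nodes = set(everything.values())                  # collect linked page names
nodes = network.censorNetwork(nodes)              # swap private names for blocks
print(len(nodes), "nodes after censoring")
```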

View file

@@ -1,2 +0,0 @@
output/
media/big-self.jpg

View file

@@ -1,22 +0,0 @@
content_folder = ['raw']
nav_tags = ['dev', 'way', 'recipe', 'ops', 'blog', 'rock', 'phil', 'story', 'blog', 'untagged']
relative_build_url = ""
absolute_build_url = "https://anish.lakhwara.com/"
files_folder = '' #TODO
build_folder = "output/"
assets_folder = "asset/"
medias_folder = "media/"
template_main = "partial/main.html"
template_nav = "partial/nav.html"
template_index = "partial/index_table.html"
template_list = "partial/list.html"
rss_template = "partial/rss.xml"
rss_item_template = "partial/item.xml"
name_of_site = "Kitaab"
site_meta_description = ""
twitter_name = "@aynish@merveilles.town"
home_name = "Home"
see_all = "See more"
go_back = "Go back to"
date_format = "EU" # Choose between EU day-month-year or ISO year-mont-day
flat_build = True # True or False, if True, all files will be written in the root of the website

View file

@@ -1,23 +0,0 @@
content_folder = ['raw']
nav_tags = ['dev', 'way', 'recipe', 'ops', 'blog', 'rock', 'phil', 'story', 'blog', 'untagged']
relative_build_url = ""
absolute_build_url = "https://anish.lakhwara.com/"
files_folder = '' #TODO
build_folder = "output/"
assets_folder = "asset/"
medias_folder = "media/"
raw_folder = "raw/"
template_main = "partial/main.html"
template_nav = "partial/nav.html"
template_index = "partial/index_table.html"
template_list = "partial/list.html"
rss_template = "partial/rss.xml"
rss_item_template = "partial/item.xml"
name_of_site = "Kitaab"
site_meta_description = ""
twitter_name = "@aynish@merveilles.town"
home_name = "Home"
see_all = "See more"
go_back = "Go back to"
date_format = "EU" # Choose between EU day-month-year or ISO year-mont-day
flat_build = True # True or False, if True, all files will be written in the root of the website
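
Both deleted copies of this configuration carry the same fields that the build scripts now import from `shared.config`. The `date_format` switch, for instance, selects the `strptime` pattern used by `clean_path` earlier in this diff; a minimal sketch with a hypothetical EU-style filename prefix:

```python
from datetime import datetime

date_format = "EU"       # from config: "EU" or "ISO"
stamp = "220325-0148"    # hypothetical date prefix of a wiki filename

# Mirrors the branch in clean_path.
if date_format == "EU":
    iso_date = str(datetime.strptime(stamp, '%y%m%d-%H%M'))
elif date_format == "ISO":
    iso_date = str(datetime.strptime(stamp, '%Y-%m-%d'))

print(iso_date)  # 2022-03-25 01:48:00
```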

src/media/big-self.jpg (new binary file, 97 KiB; not shown)

Some files were not shown because too many files have changed in this diff.