Refactor code
* This project should now make proper use of modules * HTTP-specific logic was extracted from app.d into http.d * Logic for managing files in memory was moved from watcher.d into cache.d * Added license
This commit is contained in:
parent
7e01e575fc
commit
35d5b02b5e
12 changed files with 799 additions and 89 deletions
91
source/nl/netsoj/chris/blog/cache.d
Normal file
91
source/nl/netsoj/chris/blog/cache.d
Normal file
|
@ -0,0 +1,91 @@
|
|||
import std.experimental.logger;
|
||||
import std.traits;
|
||||
|
||||
import article;
|
||||
import page;
|
||||
import project;
|
||||
|
||||
|
||||
/**
 * Module-level caches, one per content type.
 *
 * The second template argument is a sort-predicate string that is passed to
 * std.algorithm.sort inside the cache.
 * (Note: this is code which will actually be compiled and passed on!)
 */
GenericCache!(Article, "a.firstPublished > b.firstPublished") articles; // newest first
GenericCache!(Page, "a.title < b.title") pages;       // alphabetical by title
GenericCache!(Project, "a.title < b.title") projects; // alphabetical by title
|
||||
|
||||
/**
 * In-memory cache of T (where T is like a page). Right now it simply holds everything in memory.
 *
 * At a later date, this cache might start unloading lesser-accessed items, and load them later
 * again if needed.
 *
 * Params:
 *   T         = the cached item type; must be implicitly convertible to Page.
 *   sortOrder = predicate string mixed into std.algorithm.sort for the public list.
 */
struct GenericCache(T, string sortOrder)
if (isImplicitlyConvertible!(T, Page)) {
public:

	/// Adds `item` to the cache, keyed by its slug, and rebuilds the sorted list.
	void addItem(T item) {
		logf("Added %s '%s'", T.stringof, item.slug);
		m_map[item.slug] = item;
		sortItems();
	}

	/// Removes `item` (looked up by its slug) and rebuilds the sorted list.
	void removeItem(T item) {
		logf("Removed %s '%s'", T.stringof, item.slug);
		m_map.remove(item.slug);
		sortItems();
	}

	/**
	 * Removes every cached item whose name matches `name`.
	 *
	 * Matches are collected first and removed afterwards: removing entries
	 * from an associative array while iterating over it is undefined
	 * behaviour in D.
	 */
	void removeItemByName(string name) {
		T[] toRemove;
		foreach(item; m_map.byValue) {
			if (item.name == name) toRemove ~= item;
		}
		foreach(item; toRemove) {
			removeItem(item);
		}
	}

	/**
	 * Replaces the cached item that has the same name as `item`.
	 *
	 * If the item's slug changed, the entry under the old slug is removed
	 * so it does not linger in the map.
	 */
	void changeItem(T item) {
		import std.algorithm;
		import std.range;

		auto r = m_map.byValue.find!((a,b) => a.name == b.name)(item);
		if (r.empty()) {
			warningf("Could not find old %s with name %s", T.stringof, item.name);
			return;
		}
		T oldItem = r.front;
		if (oldItem.slug != item.slug) {
			logf("Slug of %s '%s' changed to '%s'", T.stringof, oldItem.slug, item.slug);
			m_map.remove(oldItem.slug);
		}
		m_map[item.slug] = item;
		sortItems();
	}

	/**
	 * Overload []-operator to redirect to our internal map.
	 */
	T opIndex(string key) {
		return m_map[key];
	}

	/// Supports the `in` operator (`"slug" in cache`), forwarding to the internal map.
	T* opBinaryRight(string op)(const scope string key) {
		static if (op == "in") {
			return key in m_map;
		} else static assert(false, "Operation " ~ op ~ " is not supported on this class");
	}

	/// Returns the non-hidden items in `sortOrder` order.
	T[] sortedList() {
		return m_publicItems;
	}

private:

	T[string] m_map;   // All items, keyed by slug.
	T[] m_publicItems; // Non-hidden items, sorted by `sortOrder`.

	/// Rebuilds m_publicItems: sorts all values, then drops hidden items.
	void sortItems() {
		import std.algorithm;
		import std.array;
		m_publicItems = sort!sortOrder(m_map.values)
			.filter!(x => !x.isHidden)
			.array;
	}
}
|
|
@ -1,35 +1,9 @@
|
|||
import std.experimental.logger;
|
||||
import std.range;
|
||||
import std.string;
|
||||
import std.stdio;
|
||||
import std.typecons;
|
||||
|
||||
import vibe.d;
|
||||
|
||||
import cache;
|
||||
import article;
|
||||
import page;
|
||||
import project;
|
||||
import watcher;
|
||||
|
||||
/**
|
||||
* Internal list of articles, pages and projects by slug.
|
||||
*/
|
||||
Article[string] articles;
|
||||
Page[string] pages;
|
||||
Project[string] projects;
|
||||
|
||||
/**
|
||||
* Default ordering and list with pointers to ordered articles.
|
||||
* (Note: this is code which will actually be compiled and passed on!)
|
||||
*/
|
||||
immutable string articleSortPred = "a.firstPublished > b.firstPublished";
|
||||
Article*[] articleList;
|
||||
|
||||
immutable string pageSortPred = "a.title < b.title";
|
||||
Page*[] pageList;
|
||||
|
||||
immutable string projectSortPred = "a.title < b.title";
|
||||
Project*[] projectList;
|
||||
|
||||
/**
|
||||
* Output types for the content.
|
||||
|
@ -130,6 +104,7 @@ public:
|
|||
@path("/posts/")
|
||||
void getArticleOverview(HTTPServerRequest req, HTTPServerResponse res) {
|
||||
addCachingHeader(res);
|
||||
Article[] articleList = articles.sortedList;
|
||||
render!("pages/article-list.dt", articleList);
|
||||
}
|
||||
|
||||
|
@ -139,6 +114,7 @@ public:
|
|||
@path("/projects/")
|
||||
void getProjectOverview(HTTPServerRequest req, HTTPServerResponse res) {
|
||||
addCachingHeader(res);
|
||||
Project[] projectList = projects.sortedList;
|
||||
render!("pages/project-list.dt", projectList);
|
||||
}
|
||||
|
||||
|
@ -148,7 +124,6 @@ public:
|
|||
@path("/projects/:slug")
|
||||
void getProject(HTTPServerRequest req, HTTPServerResponse res) {
|
||||
res.headers["Cache-Control"] = "public";
|
||||
//getSingle!(Project, "pages/project.dt")(projects, req, res);
|
||||
mixin(singleResponseMixin("projects", "pages/project.dt"));
|
||||
}
|
||||
/**
|
||||
|
@ -157,7 +132,6 @@ public:
|
|||
@path("/:slug")
|
||||
void getPage(HTTPServerRequest req, HTTPServerResponse res) {
|
||||
addCachingHeader(res);
|
||||
//getSingle!(Page, "pages/page.dt")(pages, req, res);
|
||||
mixin(singleResponseMixin("pages", "pages/page.dt"));
|
||||
}
|
||||
|
||||
|
@ -166,7 +140,6 @@ public:
|
|||
addCachingHeader(res);
|
||||
// If no slug is supplied, it will be adjusted to "index"
|
||||
req.params.addField("slug", "index");
|
||||
//getSingle!(Page, "pages/page.dt")(pages, req, res);
|
||||
mixin(singleResponseMixin("pages", "pages/page.dt"));
|
||||
}
|
||||
}
|
||||
|
@ -179,7 +152,8 @@ void errorPage(HTTPServerRequest req, HTTPServerResponse res, HTTPServerErrorInf
|
|||
render!("pages/error.dt", error)(res);
|
||||
}
|
||||
|
||||
void main() {
|
||||
@trusted
|
||||
void startHTTPServer() {
|
||||
HTTPServerSettings settings = new HTTPServerSettings;
|
||||
settings.bindAddresses = ["0.0.0.0"];
|
||||
settings.port = 3465;
|
||||
|
@ -197,16 +171,4 @@ void main() {
|
|||
router.registerWebInterface(new MijnBlog);
|
||||
|
||||
listenHTTP(settings, router);
|
||||
|
||||
// Start indexing pages.
|
||||
runTask({
|
||||
initPages!(Page, pageSortPred)(pages, pageList, "pages");
|
||||
});
|
||||
runTask({
|
||||
initPages!(Article, articleSortPred)(articles, articleList, "articles");
|
||||
});
|
||||
runTask({
|
||||
initPages!(Project, projectSortPred)(projects, projectList, "projects");
|
||||
});
|
||||
runApplication();
|
||||
}
|
27
source/nl/netsoj/chris/blog/main.d
Normal file
27
source/nl/netsoj/chris/blog/main.d
Normal file
|
@ -0,0 +1,27 @@
|
|||
import std.experimental.logger;
|
||||
import vibe.d;
|
||||
|
||||
import article;
|
||||
import page;
|
||||
import project;
|
||||
|
||||
import cache;
|
||||
import http;
|
||||
import watcher;
|
||||
|
||||
|
||||
/// Entry point: starts the HTTP server, then kicks off background indexing
/// of each content directory before entering the vibe.d event loop.
void main() {
	startHTTPServer();

	// Start indexing pages.
	// Each content type is scanned in its own vibe.d task so the HTTP server
	// stays responsive while directories are being indexed/watched.
	runTask({
		initPages!Page(&pages, "pages");
	});
	runTask({
		initPages!Article(&articles, "articles");
	});
	runTask({
		initPages!Project(&projects, "projects");
	});
	runApplication();
}
|
|
@ -34,9 +34,9 @@ class Article : Page {
|
|||
// Find the first header and mark everything up to that as
|
||||
if (m_excerpt is null) {
|
||||
// an excerpt, used in search results.
|
||||
const long seperatorIndex = indexOf(m_contentSource, "---\n");
|
||||
const uint seperatorIndex = cast(uint) indexOf(m_contentSource, "---\n");
|
||||
this.m_excerpt = this.m_contentSource[seperatorIndex + 4..$];
|
||||
long firstHeaderIndex = indexOf(this.m_excerpt, '#');
|
||||
const uint firstHeaderIndex = cast(uint) indexOf(this.m_excerpt, '#');
|
||||
if (firstHeaderIndex >= 0) {
|
||||
this.m_excerpt = this.m_excerpt[0..firstHeaderIndex];
|
||||
}
|
|
@ -55,8 +55,9 @@ class Page {
|
|||
this.m_name = file;
|
||||
this.m_contentSource = readText(file);
|
||||
// Find the seperator and split the string in two
|
||||
const long seperatorIndex = indexOf(m_contentSource, "---\n");
|
||||
const uint seperatorIndex = cast(uint) lastIndexOf(m_contentSource, "---\n");
|
||||
enforce!ArticleParseException(seperatorIndex >= 0);
|
||||
|
||||
string header = m_contentSource[0..seperatorIndex];
|
||||
|
||||
Node node = Loader.fromString(header).load();
|
|
@ -6,8 +6,8 @@ import dyaml;
|
|||
import vibe.vibe;
|
||||
|
||||
import page;
|
||||
import staticpaths;
|
||||
import utils;
|
||||
import staticpaths;
|
||||
|
||||
/**
|
||||
* Represents a project, like an unfinished application
|
|
@ -3,22 +3,24 @@ import std.algorithm;
|
|||
import std.experimental.logger;
|
||||
import std.file;
|
||||
import std.stdio;
|
||||
import std.traits;
|
||||
|
||||
import vibe.d;
|
||||
|
||||
import app;
|
||||
import cache;
|
||||
import page;
|
||||
|
||||
/**
|
||||
* Loads pages into memory and sets up a "watcher" to watch a directory for file changes.
|
||||
*/
|
||||
void initPages(T, string sortPred)(ref T[string] array, ref T*[] sortedRange, const string directory) {
|
||||
void initPages(T, C)(C *cache, const string directory)
|
||||
if (isImplicitlyConvertible!(T, Page)) {
|
||||
|
||||
bool addPage(string path) {
|
||||
try {
|
||||
T newPage = new T(path);
|
||||
logf("Added %s", newPage.slug);
|
||||
array[newPage.slug] = newPage;
|
||||
cache.addItem(newPage);
|
||||
return true;
|
||||
} catch (page.ArticleParseException e) {
|
||||
logf("Could not parse %s: %s", path, e);
|
||||
|
@ -41,21 +43,11 @@ void initPages(T, string sortPred)(ref T[string] array, ref T*[] sortedRange, co
|
|||
}
|
||||
}
|
||||
|
||||
void sortThings() {
|
||||
sortedRange = sort!(sortPred)(array.values)
|
||||
.filter!(x => !x.isHidden)
|
||||
.map!"&a".array;
|
||||
logf("sorted: %s", sortedRange);
|
||||
}
|
||||
|
||||
if (!existsFile(getWorkingDirectory() ~ directory)) {
|
||||
createDirectory(getWorkingDirectory() ~ directory);
|
||||
}
|
||||
scan(getWorkingDirectory() ~ directory);
|
||||
sortThings();
|
||||
|
||||
DirectoryWatcher watcher = watchDirectory(getWorkingDirectory() ~ directory, true);
|
||||
//auto watcher = FileWatch((getWorkingDirectory() ~ directory).toString(), true);
|
||||
|
||||
bool shouldStop = false;
|
||||
while (!shouldStop) {
|
||||
|
@ -79,21 +71,7 @@ void initPages(T, string sortPred)(ref T[string] array, ref T*[] sortedRange, co
|
|||
T newPage;
|
||||
try {
|
||||
newPage = new T(change.path.toString());
|
||||
log(newPage.slug);
|
||||
if (newPage.slug in array) {
|
||||
log("Slug not changed");
|
||||
array[newPage.slug] = newPage;
|
||||
} else {
|
||||
log("Slug changed");
|
||||
foreach(item; array) {
|
||||
if (item.name == change.path.toString()) {
|
||||
logf("Removed %s, which is the old slug of %s", newPage.slug, item.slug);
|
||||
array.remove(item.slug);
|
||||
}
|
||||
}
|
||||
}
|
||||
logf("%s modified: %s", typeid(T).stringof, newPage.slug);
|
||||
array[newPage.slug] = newPage;
|
||||
cache.changeItem(newPage);
|
||||
} catch(page.ArticleParseException e) {
|
||||
warningf("Could not parse %s", change.path.toString());
|
||||
} catch (Exception e) {
|
||||
|
@ -102,23 +80,13 @@ void initPages(T, string sortPred)(ref T[string] array, ref T*[] sortedRange, co
|
|||
break;
|
||||
case removed:
|
||||
try {
|
||||
foreach(item; array.byValue) {
|
||||
logf(" - %s", item.name);
|
||||
if (item.name == change.path.toString()) {
|
||||
logf("Removed %s", item.slug);
|
||||
array.remove(item.slug);
|
||||
}
|
||||
}
|
||||
} catch(Exception e) {}
|
||||
cache.removeItemByName(change.path.toString());
|
||||
} catch(Exception e) {
|
||||
logf("Error while trying to remove %s: %s", T.stringof, e.msg);
|
||||
}
|
||||
break;
|
||||
default: break;
|
||||
}
|
||||
log("Current state:");
|
||||
//sortedRange = sort!("a."~sortProp~" < b."~sortProp)(array.values);
|
||||
sortThings();
|
||||
foreach (item; array) {
|
||||
logf("%s - %s", item.name, item.slug);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue