Website draft

Mentor Palokaj 2021-10-16 13:20:49 +02:00
parent 9037e9f993
commit b2005ee2a4
38 changed files with 19653 additions and 0 deletions

website/.babelrc Normal file

@@ -0,0 +1,12 @@
{
"presets": [
[ "@babel/preset-env", {
"loose": true,
"targets": { "browsers": [ "last 2 versions" ] },
"debug": false
} ]
],
"plugins": [
"@babel/plugin-transform-runtime"
]
}

website/.eslintrc.js Normal file

@@ -0,0 +1,42 @@
module.exports = {
// Recommended features
"extends": [ "eslint:recommended" ],
    // Parser features
parser: "@babel/eslint-parser",
parserOptions: {
requireConfigFile: false,
ecmaVersion: 12,
sourceType: "module",
ecmaFeatures: {
experimentalObjectRestSpread: true
}
},
// Specific rules, 2: err, 1: warn, 0: off
rules: {
"prefer-arrow-callback": 2,
"no-mixed-spaces-and-tabs": 1,
"no-unused-vars": [ 1, { vars: 'all', args: 'none' } ], // All variables, no function arguments
// React specific
"react/prop-types": 0,
"react/display-name": 0
},
// What environment to run in
env:{
node: true,
browser: true,
mocha: true,
jest: false,
es6: true
},
// What global variables should be assumed to exist
globals: {
context: false
}
}

website/.gitignore vendored Normal file

@@ -0,0 +1,45 @@
# Tests
broken-links.json
# Logs
logs
*.log
npm-debug.log*
public
# Runtime data
pids
*.pid
*.seed
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# nyc test coverage
.nyc_output
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# node-waf configuration
.lock-wscript
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules
jspm_packages
# Optional npm cache directory
.npm
# Optional REPL history
.node_repl_history
# Public dir
public/*
.env

website/.nvmrc Normal file

@@ -0,0 +1 @@
15

website/README.md Normal file

@@ -0,0 +1,92 @@
# 👋 🔫 HI PEW - High Performance Website Boilerplate
<img height="50px" style="float: left;" alt="webpack" src="http://i.imgur.com/xz36f45.png" /> <img height="50px" style="float: left;" alt="browsersync" src="http://i.imgur.com/L5peje9.png" /> <img height="50px" style="float: left;" alt="pug" src="http://i.imgur.com/x4sHEg4.png" /> <img height="50px" style="float: left;" alt="sass" src="http://i.imgur.com/O9ikKdz.png" />
A static website generator that implements best practices for page speed. [ Click here for a live demo ]( https://actuallymentor.github.io/hi-pew/ ).
- input: Markup in [pug]( https://github.com/pugjs ), styling in [Sass]( https://github.com/sass/sass ) and JavaScript with [Babel]( https://babeljs.io/ )
- output: deployment-ready minified, prefixed and compressed build files
Benefits:
- 🚀 100% Google Page Speed Score ([view score]( https://developers.google.com/speed/pagespeed/insights/?url=https://actuallymentor.github.io/hi-pew/ ))
- 👩‍💻 Use `pug`, `sass` and the newest `js` with zero setup time
- 👓 SEO best practices auto-implemented
- 🇪🇺 Built-in multilanguage support
- 🌐 Built-in broken link checker through `npm test`
- 🧪 Advanced performance options and compatibility warnings
## Getting started
Dependencies:
- [node.js]( https://nodejs.org/en/ )
- [nvm]( https://github.com/nvm-sh/nvm ) ( optional, recommended )
### Basic usage
1. Clone this repository
2. Run `npm start`, your browser will open with a live-updating preview
3. Edit the source files in `src/`
4. Configure SEO settings in `modules/config.js`
To create a production build in `docs/`:
```shell
npm run build
```
### Advanced usage
1. Customise auto-image compression
- Edit the `system.images` key to include your compression preferences for `jpeg`, `webp` and `avif`
    - Use the `rimg` (responsive img) mixin found in `src/pug/_helpers` (see the usage sketch after this list)
    - Use the `cimg` (compressed img) mixin found in `src/pug/_helpers`
    - Note: images are not compressed in `NODE_ENV=development` mode (the `npm start` default); `npm run build` triggers the actual file optimisation
2. Separate your CSS for meaningful-paint optimisation
- Use `src/css/essential-above-the-fold.sass` for essential above the fold styles
- Use `src/css/styles.sass` for below the fold styles
3. Set per-page SEO settings
    - Every `.pug` file may contain its own metadata and sharing image
    - The `page` object can set `title`, `desc`, `url`, `published` and `featuredimg`, which are used in the `head` meta tags and in the footer's `application/ld+json` rich snippet data
4. Configure deeper browser compatibility
    - By default `npm start` runs a [ caniuse ]( https://caniuse.com/ ) compatibility check on your Sass
    - JavaScript backwards compatibility is configured in `.babelrc`
    - CSS compatibility is configured in `modules/config.js` under `browsers`
5. Enable auto-deployment
- Templates for Github pages, Firebase and AWS are available in `.github/workflows`
6. Use subpages (like `/category/people/john.html`)
- Any `.pug` file in `src` will be compiled except those that are in reserved folders or begin with `_`
- `src/index.pug` \> `index.html`
- `src/pages/contact.pug` \> `/pages/contact.html`
- `src/{ assets, content, css, js }/template.pug` \> not rendered
- `src/pug/_footer.pug` \> not rendered (unless included in another pug)
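A minimal usage sketch of the two image mixins, assuming a hypothetical image at `src/assets/hero.jpg` (the mixin signatures are taken from `src/pug/_helpers.pug`):
```pug
include pug/_helpers.pug
//- Responsive picture tag with avif/webp/jpg srcsets for every configured size
+rimg( src=site.system.url + '/assets/hero.jpg' alt='Hero image' )
//- Single compressed variant closest to the requested display size in px
+cimg( size=200 src=site.system.url + '/assets/hero.jpg' alt='Hero thumbnail' )
```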
### Multiple languages
Every `.json` or `.js` file in `src/content` will result in a duplicate of your website using the content in that file.
```js
module.exports = {
slug: "/", // The relative URL of this language
lang: "en", // The language code of this language (use W3 compliant codes)
    // You can create any keys and access them inside pug files
hero: {
"title": "something",
"explanation": "lorem ipsum"
},
usps: [ "It's good", "It's free" ]
}
```
The attributes can be read inside any pug template under the `content` variable, for example:
```pug
div.hero
p#title #{ content.hero.title }
a( href=content.slug ) home
div.usp
each usp in content.usps
p= usp
```
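For example, with the default English content (`slug: "/"`) and a second content file with `slug: "/de/"`, a production build writes every page once per language. A sketch of the resulting `docs/` layout, inferred from the `writehtml` logic in `modules/publish-pug.js`:
```
docs/
├── index.html            # en
├── index/index.html      # en, copy that serves extensionless URLs
├── de/
│   ├── index.html        # de
│   └── index/index.html  # de
└── sitemap.xml           # all pages in all languages
```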

website/modules/config.js Normal file

@@ -0,0 +1,73 @@
const ip = require( 'ip' )
const { normalize } = require( 'path' )
module.exports = {
// ///////////////////////////////
// Identity variables used in pug templates
// Relevant for SEO
// ⚠️ You should edit this section
// ///////////////////////////////
identity: {
title: "Hi Pew",
desc: "High Performance Website Boilerplate",
"logo": "logo.jpg"
},
// ///////////////////////////////
// System vars managing some pug elements as well as file paths
// ///////////////////////////////
system: {
        // ⚠️ You should edit the 'url' key to be the production URL
url: process.env.NODE_ENV == 'production' ? 'https://actuallymentor.github.io/hi-pew/' : 'http://' + ip.address() + ':3000/',
public: normalize( process.env.NODE_ENV == 'production' ? __dirname + '/../docs/' : __dirname + '/../public/' ),
source: normalize( __dirname + '/../src/' ),
timestamp: new Date().getTime(),
year: new Date().getFullYear(),
// Browser compatibility warnings
browser: {
support: {
// browsers: [ 'last 2 versions', 'not dead' ],
// Format your own based on: https://github.com/browserslist/browserslist
browsers: [ '>1%' ]
}
},
// Image compression settings, these defaults are fine for many people
images: {
            defaultQuality: 80, // Default image quality, read as defaultQuality by modules/parse-images.js
sizes: [ 240, 480, 720, 1080, 2160, 3840 ], // Image sizes to generate
extensions: [ 'jpg', 'png', 'jpeg', 'webp' ] // Image file extensions to compress and transform
}
},
// ///////////////////////////////
// About the author. Change this to your own
// ⚠️ You should edit this section
// ///////////////////////////////
author: {
firstname: "Mentor",
lastname: "Palokaj",
email: "mentor@palokaj.co",
twitter: "@actuallymentor",
// facebook profile id, used for retargeting ad permissions
facebook: "1299359953416544",
url: "https://www.skillcollector.com/"
},
// ///////////////////////////////
// Tracking codes
// ⚠️ You should edit this section
// ///////////////////////////////
track: {
gverification: undefined, // Google webmaster verification code
gtm: undefined // Google tag manager code
}
}

website/modules/parse-fs.js Normal file

@@ -0,0 +1,61 @@
const { promises: fs } = require( 'fs' )
const { normalize } = require('path')
const del = require( 'del' )
const mkdirp = require( 'mkdirp' )
// Promise structure for writing a file to disk
const writefile = fs.writeFile
// Check if a resource exists
const exists = what => fs.access( what ).then( f => true ).catch( f => false )
// Delete a folder through the promise api
const delp = async what => {
const file = await exists( what )
if( file ) return del.sync( [ what ] )
}
// Make directory if it does not exist yet
const mkdir = async path => {
    const file = await exists( path )
    if( !file ) await mkdirp( path )
}
// Read the contents of these files and return as an array
const readdata = ( path, filename ) => fs.readFile( normalize( `${path}/${filename}` ), 'utf8' ).then( data => ( { filename: filename, data: data } ) )
// Safely write a file by making sure its directory exists first
const safewrite = async ( path, file, content ) => {
try {
path = normalize( path )
await mkdir( path )
await writefile( path + file, content )
} catch( e ) {
console.log( `Error writing ${ path }${ file }: `, e )
}
}
module.exports = {
write: writefile,
swrite: safewrite,
del: delp,
mkdir: mkdir,
readFile: readdata,
exists: exists
}

website/modules/parse-images.js Normal file

@@ -0,0 +1,92 @@
const sharp = require('sharp')
const fs = require( 'fs' )
const path = require( 'path' )
const { promises: pfs } = require( 'fs' )
const { mkdir } = require( __dirname + '/parse-fs' )
// No limit to streams
process.setMaxListeners( 0 )
// Promisify streams
const stream = ( readstream, writepath, transform ) => new Promise( ( resolve, reject ) => {
// Dry run config for dev mode
const { NODE_ENV } = process.env
// Make the write stream
const write = fs.createWriteStream( writepath )
// Enable the writing pipe
if( NODE_ENV == 'development' ) readstream.pipe( write )
else readstream.pipe( transform ).pipe( write )
write.on( 'close', resolve )
write.on( 'error', reject )
} )
const compressOneImageToMany = async ( site, filename ) => {
if( !filename ) return 'This was a deletion'
try {
// System settings
const { system: { images } } = site
const { sizes=[], defaultQuality } = images
console.log( `⏱ Compressing ${ sizes.length * 3 } forms of `, `${ site.system.source }assets/${ filename }` )
// Image metadata
const filePath = `${ site.system.source }/assets/${ filename }`
const metadata = await sharp( filePath ).metadata()
const selectMaxSize = size => {
if( size < metadata.width ) return size
else return metadata.width
}
// Create convertor stream handlers
const jpegConversionStreams = sizes.map( size => ( {
convertor: sharp().resize( selectMaxSize( size ), undefined ).jpeg( { quality: defaultQuality } ),
size: size,
extension: 'jpg'
} ) )
const webpConversionStreams = sizes.map( size => ( {
convertor: sharp().resize( selectMaxSize( size ), undefined ).webp( { quality: defaultQuality } ),
size: size,
extension: 'webp'
} ) )
const avifConversionStreams = sizes.map( size => ( {
convertor: sharp().resize( selectMaxSize( size ), undefined ).avif( { quality: defaultQuality } ),
size: size,
extension: 'avif'
} ) )
// Read stream of the image
const imageStream = fs.createReadStream( filePath )
// Create the folder (or even subfolder) the image is in
const parentFolder = path.dirname( filename )
await mkdir( `${ site.system.public }/assets/${ parentFolder }` )
// Create streams for all the transforms
const [ fm, ext ] = ( filename && filename.match( /(?:.*)(?:\.)(.*)/ ) ) || []
const fileNameWithoutExt = path.basename( filename, `.${ ext }` )
        await Promise.all( [ ...jpegConversionStreams, ...webpConversionStreams, ...avifConversionStreams ].map( ( { convertor, size, extension } ) => {
            // Write into the same relative subfolder that was just created for the source image
            return stream( imageStream, `${ site.system.public }/assets/${ parentFolder }/${ fileNameWithoutExt }-${ size }.${ extension }`, convertor )
        } ) )
console.log( '✅ Compression of ', `${ site.system.source }assets/${ filename }`, 'complete' )
} catch( e ) {
console.log( `Error compressing image: `, e )
throw e
}
}
module.exports = compressOneImageToMany

website/modules/parse-locales.js Normal file

@@ -0,0 +1,32 @@
const fs = require( 'fs' )
const { readFile } = require( './parse-fs' )
// Grab all content files from the root of the content directory
const getJSONs = path => new Promise( ( resolve, reject ) => {
fs.readdir( path, ( err, files ) => {
if ( err ) return reject( err )
        // This will return an array of file names that end in .json or .js
        resolve( files.filter( file => file.endsWith( '.json' ) || file.endsWith( '.js' ) ) )
} )
} )
// Validate the structure of the json filename and get the lang for it
// Json file syntax should be language.json ( e.g. en.json or nl.json )
const validateJson = json => new Promise( ( resolve, reject ) => {
    if( json.lang && json.slug && json.slug.includes( '/' ) ) return resolve( json )
    reject( new Error( 'Invalid content file: lang and slug are required' ) )
} )
// Return the parsed and validated content files
const getContent = path => getJSONs( path )
    // Get the content of each file, outputs one content object per language
    .then( files => Promise.all( files.map( async file => {
        // Load .js files as node modules ( note: a bare .includes( 'js' ) check would also match .json )
        if( file.endsWith( '.js' ) ) return require( `${path}/${file}` )
        // Parse .json files from their string contents
        return readFile( path, file ).then( string => JSON.parse( string.data ) )
    } ) ) )
// Validate that the jsons are well-formatted
.then( allJsons => Promise.all( allJsons.map( json => validateJson( json ) ) ) )
module.exports = getContent

website/modules/parse-pugfiles.js Normal file

@@ -0,0 +1,48 @@
const readdir = require( 'recursive-readdir' )
const { readFile } = require( './parse-fs' )
// Grab all pug files from the root of the source directory
const getpugs = async srcPath => {
    // Strip any trailing slash so the directory checks below match actual paths
    const base = srcPath.replace( /\/$/, '' )
    const ignoreNonPugAndPugWithUnderscore = ( pathname, stats ) => {
        // Ignore system folders
        if( stats.isDirectory() ) {
            if( pathname.includes( `${ base }/assets` ) ) return true
            if( pathname.includes( `${ base }/js` ) ) return true
            if( pathname.includes( `${ base }/css` ) ) return true
            if( pathname.includes( `${ base }/content` ) ) return true
            if( pathname.includes( `${ base }/pug` ) ) return true
        }
// Traverse all other directories
if( stats.isDirectory() ) return false
        // Ignore files that are not pugs or begin with an underscore
if( !pathname.includes( '.pug' ) ) return true
if( pathname.match( /\/_.*\.pug$/ ) ) return true
// Keep the rest
return false
}
try {
const filesWithFullPath = await readdir( srcPath, [ ignoreNonPugAndPugWithUnderscore ] )
const filesRelativeToSrc = filesWithFullPath.map( pugpath => pugpath.replace( srcPath, '' ) )
return filesRelativeToSrc
} catch( e ) {
console.error( `Error getting pugs from: `, e )
}
}
// Use the above two promises to return the pug files ( as pug syntax )
// Grab all .pug files
const returnpugs = srcPath => getpugs( srcPath )
// Grab the content of all .pug files
.then( files => Promise.all( files.map( filename => readFile( srcPath, filename ) ) ) )
module.exports = returnpugs

website/modules/publish-assets.js Normal file

@@ -0,0 +1,65 @@
// promise based file parsing
const { del, mkdir } = require( __dirname + '/parse-fs' )
const { promises: fs } = require( 'fs' )
// Recursive copy library
const ncp = require( 'ncp' )
// Image parsing
const compressImage = require( __dirname + '/parse-images' )
// Promise ncp
const pncp = ( source, dest, opt ) => new Promise( ( resolve, reject ) => {
ncp( source, dest, opt, err => err ? reject( err ) : resolve() )
} )
const copyfolder = async ( source, destination ) => {
await mkdir( destination )
// No clobber means no overwrites for existing files
await pncp( source, destination, { clobber: false } )
}
const copyassets = async ( site, filename ) => {
try {
const { extensions } = site.system.images
// Delete relevant assets
if( filename ) await del( `${ site.system.public }/assets/${ filename }` )
else await del( site.system.public + 'assets/*' )
// Copy entire asset folder
await copyfolder( site.system.source + 'assets', site.system.public + 'assets' )
// If single file, compress single file
        const [ fullmatch, extOfFilename ] = ( filename && filename.match( /(?:.*)(?:\.)(.*)/ ) ) || []
if( extensions.includes( extOfFilename ) ) await compressImage( site, filename )
// If not a single file, grab the images and compress them
if( !filename ) {
const allAssets = await fs.readdir( `${ site.system.source }/assets/` )
const allImages = allAssets.filter( path => {
const [ fm, ext ] = ( path && path.match( /(?:.*)(?:\.)(.*)/ ) ) || []
return extensions.includes( ext )
} )
// Convert all
await Promise.all( allImages.map( img => compressImage( site, img ) ) )
}
} catch( e ) {
console.log( `Error copying assets: `, e )
throw e
}
}
module.exports = copyassets

website/modules/publish-css.js Normal file

@@ -0,0 +1,71 @@
const sass = require( 'node-sass' )
const { promises: fs } = require( 'fs' )
const { mkdir } = require( './parse-fs' )
const postcss = require( 'postcss' )
const autoprefixer = require( 'autoprefixer' )
const cssnano = require( 'cssnano' )
const doiuse = require( 'doiuse' )
const cssWarning = warning => {
const { feature, featureData } = warning
const { title, missing, partial } = featureData
if( partial ) console.log( "\x1b[33m", `[CSS] partial support - ${ title } - ${ partial }`, "\x1b[0m" )
if( missing ) console.log( "\x1b[31m", `[CSS] missing support - ${ title } - ${ missing } missing support`, "\x1b[0m" )
}
const file = site => new Promise( ( resolve, reject ) => {
const css = {
from: `${site.system.source}css/styles.sass`,
to: `${site.system.public}assets/css/styles.css`
}
mkdir( `${site.system.public}assets/css/` ).then( f => {
sass.render( {
file: css.from,
// Add source map if in dev mode
...( !( process.env.NODE_ENV == 'production' ) && { sourceMap: true, sourceMapEmbed: true } )
}, ( err, result ) => {
if( err || !result ) return reject( err )
// Run postcss with plugins
postcss( [
autoprefixer,
cssnano,
doiuse( { ...site.system.browser, onFeatureUsage: cssWarning } )
] )
            .process( result.css, { from: css.from, to: css.to } )
            .then( result => fs.writeFile( css.to, result.css ) )
            .then( resolve )
            .catch( reject )
} )
} )
} )
const inline = ( site, path ) => new Promise( ( resolve, reject ) => {
sass.render( {
file: path,
// Add source map if in dev mode
...( !( process.env.NODE_ENV == 'production' ) && { sourceMap: true, sourceMapEmbed: true } )
}, ( err, result ) => {
if( err || !result ) return reject( err )
// Run postcss with plugins
postcss( [
autoprefixer,
cssnano,
doiuse( { ...site.system.browser, onFeatureUsage: cssWarning } )
] )
.process( result.css, { from: path, to: path + 'dummy' } )
            .then( result => resolve( result.css ) )
            .catch( reject )
} )
} )
module.exports = {
inlinecss: inline,
css: file
}

website/modules/publish-pug.js Normal file

@@ -0,0 +1,107 @@
const getAllPugfiles = require( __dirname + '/parse-pugfiles' )
const getContent = require( __dirname + '/parse-locales' )
const pfs = require( __dirname + '/parse-fs' )
const pug = require( 'pug' )
const { inlinecss } = require( './publish-css' )
const { minify } = require( 'html-minifier' )
const { SitemapStream, streamToPromise } = require( 'sitemap' )
const site = require( __dirname + '/config' )
// Compile pug to html
// Return a resolved promise with the file data
const compilepug = ( path, filename, css, content ) => Promise.resolve( {
path: path,
filename: filename,
baseSlug: content.slug,
lang: content.lang,
// Compile the pug file with the site config as a local variable
html: minify( pug.renderFile( path + filename, { site: site, css: css, content: content, basedir: path } ), {
html5: true,
minifyCSS: true,
minifyJS: true,
collapseWhitespace: true,
conservativeCollapse: true,
processScripts: [ 'application/ld+json' ]
} )
} )
// Construct links
const makeLinks = ( pugfiles, content ) => {
// Paths of files and thus the urls
const paths = pugfiles.map( page => page.filename )
// Structure the URLs for the sitemap package
const structuredUrls = paths.map( path => ( {
url: `${ site.system.url }${ path.split( '.' )[ 0 ] }.html`,
links: content.map( lang => ( {
lang: lang.lang,
            // Remove the trailing slash from the system url
url: `${ site.system.url.replace(/\/$/, "") }${ lang.slug }${ path.split( '.' )[ 0 ] }.html`
} ) )
} ) )
return structuredUrls
}
// Make sitemap
const makeSitemap = links => {
const stream = new SitemapStream( { hostname: site.system.url } )
links.map( link => stream.write( link ) )
stream.end()
return streamToPromise( stream ).then( data => data.toString() )
}
// Run a promise for every content item
const makeAllPugs = ( pugstrings, css, contents ) => Promise.all( contents.map( content => {
// For every json declaration, make all pages
return pugstrings.map( pug => compilepug( site.system.source, pug.filename, css, content ) )
// Flatten the array of arrays to just be an array of promises
} ).flat() )
// Write html to disk
// Use the safe write feature of the psf module
const writehtml = async ( site, page ) => {
const folder = site.system.public + page.baseSlug + page.filename.split( '.' )[ 0 ]
await pfs.mkdir( folder )
return Promise.all( [
pfs.swrite( site.system.public + page.baseSlug, `${ page.filename.split( '.' )[ 0 ] }.html`, page.html ),
pfs.swrite( site.system.public + page.baseSlug + page.filename.split( '.' )[ 0 ] + '/', `index.html`, page.html )
] )
}
const writeSitemap = ( site, sitemap ) => pfs.swrite( site.system.public, 'sitemap.xml', sitemap.toString() )
// Combine the above two and the parse-pugfiles module to read, compile and write all pug files
// Make the public directory
const publishfiles = async ( site, filter ) => {
await pfs.mkdir( site.system.public )
// Grab the pug data from disk
const [ pugfiles, css, content ] = await Promise.all( [
getAllPugfiles( site.system.source ),
inlinecss( site, `${ site.system.source }css/essential-above-the-fold.sass` ),
getContent( `${ site.system.source }content` )
] )
// Parse pug into html
// Pugfiles have .filename and .data
    // If a filter is applied, only build the matching pug files, but keep every page in the sitemap links
const filteredPugfiles = filter ? pugfiles.filter( ( { filename } ) => filename.includes( filter ) ) : pugfiles
const [ htmls, links ] = await Promise.all( [
makeAllPugs( filteredPugfiles, css, content ),
makeLinks( pugfiles, content )
] )
// Write html files to disk
// Html ( page ) has .path, .filename .baseSlug and .html
return Promise.all( [
Promise.all( htmls.map( page => writehtml( site, page ) ) ),
writeSitemap( site, await makeSitemap( links ) )
] )
}
module.exports = publishfiles

website/package-lock.json generated Normal file

File diff suppressed because it is too large

website/package.json Normal file

@@ -0,0 +1,62 @@
{
"name": "static-page-boilerplate-webpack-browsersync",
"version": "1.0.0",
"description": "Static Webpage with Webpack and Browsersync Boilerplate",
"main": "app.js",
"scripts": {
"nvm": "nodever=$(<.nvmrc) && node -v | grep -q \"v$nodever\" && echo \"Correct node version\" || ( echo \"[x] Node version $nodever required\\n\\n\" && exit 1 )",
"test": "mocha -r esm",
"start": "npm run nvm && NODE_ENV=development webpack --progress --watch",
"build": "npm run nvm && NODE_ENV=production webpack --progress"
},
"repository": {
"type": "git",
"url": "git+https://github.com/actuallymentor/static-webpage-boilerplate-webpack-browsersync.git"
},
"author": "Mentor Palokaj <mentor@palokaj.co> (https://github.com/actuallymentor)",
"license": "MIT",
"bugs": {
"url": "https://github.com/actuallymentor/static-webpage-boilerplate-webpack-browsersync/issues"
},
"homepage": "https://github.com/actuallymentor/static-webpage-boilerplate-webpack-browsersync#readme",
"dependencies": {
"@babel/core": "^7.13.15",
"@babel/polyfill": "^7.12.1",
"@babel/preset-env": "^7.13.15",
"autoprefixer": "^10.2.5",
"babel-loader": "^8.2.2",
"browser-sync": "^2.26.14",
"browser-sync-webpack-plugin": "^2.3.0",
"css-loader": "^5.2.1",
"cssnano": "^5.0.0",
"del": "^6.0.0",
"doiuse": "^4.4.1",
"dotenv": "^8.2.0",
"html-minifier": "^4.0.0",
"ip": "^1.1.5",
"mkdirp": "^1.0.4",
"ncp": "^2.0.0",
"node-sass": "^5.0.0",
"postcss": "^8.2.10",
"pug": "^3.0.2",
"sharp": "^0.28.1",
"sitemap": "^6.4.0",
"webpack": "^5.31.2"
},
"devDependencies": {
"@babel/eslint-parser": "^7.13.14",
"@babel/plugin-transform-runtime": "^7.13.15",
"chai": "^4.3.4",
"chai-as-promised": "^7.1.1",
"eslint": "^7.24.0",
"esm": "^3.2.25",
"mocha": "^8.3.2",
"recursive-readdir": "^2.2.2",
"request": "^2.88.2",
"request-promise-native": "^1.0.9",
"webpack-cli": "^4.6.0"
},
"optionalDependencies": {
"fsevents": "^2.3.2"
}
}
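
A quick workflow sketch for the scripts above (assuming [nvm]( https://github.com/nvm-sh/nvm ) is installed; `.nvmrc` pins node 15):
```shell
nvm install && nvm use   # switch to the node version from .nvmrc
npm install              # install dependencies
npm start                # development build with browser-sync live reload on port 3000
npm run build            # production build ( NODE_ENV=production )
npm test                 # mocha broken-link checker
```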

Six binary image assets added (previews not shown): 1.9 MiB, 789 KiB, 2.6 MiB, 1.6 MiB, 279 KiB and 887 KiB.

website/src/content/en.js Normal file

@@ -0,0 +1,4 @@
module.exports = {
"slug": "/",
"lang": "en"
}

website/src/content/de.json Normal file

@@ -0,0 +1,4 @@
{
"slug": "/de/",
"lang": "de"
}

website/src/css/ (font mixin partial, exact filename not shown)

@@ -0,0 +1,7 @@
@mixin font( $filename, $familyname, $weight: 400, $style: normal )
    @font-face
        // Single-source fallback first; "opentype" is the correct format string for .otf
        src: url("/assets/fonts/#{$filename}.otf") format("opentype")
        // woff2 listed first: browsers use the first format they support
        src: url("/assets/fonts/#{$filename}.woff2") format("woff2"), url("/assets/fonts/#{$filename}.woff") format("woff"), url("/assets/fonts/#{$filename}.otf") format("opentype")
font-weight: $weight
font-style: $style
font-family: $familyname

website/src/css/ (main stylesheet, exact filename not shown)

@@ -0,0 +1,131 @@
$background-color: #efefef
body
// Override browser default padding and margin
margin: 0
padding: 0
// Set font globally
font-family: 'Helvetica', 'Arial', sans-serif
font-weight: 300
font-display: swap
// Sticky the footer
display: flex
min-height: 100vh
flex-direction: column
    // Background color
background: $background-color
// Box sizing
*
box-sizing: border-box
h1
font-size: 2rem
h2
font-size: 1.1rem
main
// make footer sticky
flex: 1 0 auto
// Vertically align content
display: flex
align-items: center
justify-content: flex-start
flex-direction: column
#footer
    // make the copyright notice less in your face
text-align: center
padding: 10px
opacity: 0.6
font-size: 13px
// ///////////////////////////////
// Index page
// ///////////////////////////////
#index
section
position: relative
display: flex
flex-direction: column
align-items: center
justify-content: center
width: 100%
min-height: 300px
padding: 1rem
margin: 3rem 0
&:first-child
margin-top: 0
.stretch
position: absolute
z-index: -1
overflow: hidden
min-width: 100%
min-height: 100%
.circle
border-radius: 50%
height: 100%
width: 100%
margin-bottom: 3rem
#hero
padding: 2rem 1rem
width: 100%
height: 99vh
max-height: 100vh
h1, h2, a
background: rgba( 0,0,0,.9 )
color: white
padding: 1rem
text-align: center
#usps
flex-direction: row
flex-wrap: wrap
.usp
width: 300px
max-width: 100%
padding: 1rem
text-align: center
p
&:first-child
font-size: 4rem
margin: 0
&:nth-child( 2 )
font-size: 1.2rem
#ctabuttons
margin-top: 4rem
a
padding: 1rem
text-decoration: none
margin: 0 1rem
&:first-child
background: black
color: white
&:last-child
border: 2px solid black
color: black
#credits, #about
background: rgba( 0,0,0, .05 )
text-align: center
p
max-width: 400px
#cta
#compressed
height: 200px
width: 200px
a
background: black
color: white
padding: 1rem
text-decoration: none


website/src/index.pug Normal file

@@ -0,0 +1,58 @@
//- --------------------------------
//- HiPew settings
//- --------------------------------
//- This is page-specific metadata
-
const page = {
title: 'Home', desc: 'Live demo of the Hi Pew high performance static website boilerplate.', url: '/',
published: '2020-02-11', featuredimg: '/assets/mike-kiev-Opzk_hvwO9Q-unsplash.jpg'
}
//- This is a file with helpers
include pug/_helpers.pug
//- --------------------------------
//- Webpage source code
//- --------------------------------
doctype html
html( lang=content.lang )
include pug/_header.pug
body#index
main
section#hero
h1 Rocketeer NFT
h2 10,000,000 possible combinations, 0 price.
a( target='_blank' href='' ) Mint now
+rimg.stretch( src=site.system.url+'/assets/mike-kiev-Opzk_hvwO9Q-unsplash.jpg' )
section#usps
div.usp
p 🕶
p 5 visors in 20 possible colors
div.usp
p 👔
p 9 suits in 10 colors
div.usp
p 🏔
p 20 backdrops in 5 colors
div#ctabuttons
a( href='/' ) Mint now
a( href='/' ) View on Opensea
section#credits
h1 Acknowledgements
p Designs by unvetica.eth, code by mentor.eth, logistical support from colfax and LookingForOwls.
p Photos taken by Mike Kiev and Adam Miller.
section#cta
+cimg.circle#compressed( size=200 src=site.system.url+'/assets/adam-miller-dBaz0xhCkPY-unsplash-cropped.jpg' )
a( href='/' ) Mint now
section#about
h1 About this project
p This is a Rocketpool community led initiative. There is no grand idea behind it, other than to create avatar-worthy Rocketeer images for the Rocketpool Discord members.
p We all hope to look back 10 years from now at our Rocketeers and remember the days when we were still wondering wen mainnet.
include pug/_footer.pug

website/src/js/main.js Normal file

@@ -0,0 +1,3 @@
import { log } from './modules/helpers'
log( 'I am alive' )

website/src/js/modules/helpers.js Normal file

@@ -0,0 +1,21 @@
// Selectors
export const q = query => document.querySelector( query )
export const qa = query => document.querySelectorAll( query )
// Debugging: log only in development, or when the url contains 'debug'
export const log = ( ...messages ) => {
try {
const { NODE_ENV } = process.env
const { href } = location || {}
if( NODE_ENV == 'development' || ( href && href.includes( 'debug' ) ) ) console.log( ...messages )
} catch( e ) {
console.log( 'Error in logger: ', e )
console.log( ...messages )
}
}

website/src/js/modules/ (query-string helpers, exact filename not shown)

@@ -0,0 +1,22 @@
export const getQuery = variable => {
let query = window.location.search.substring(1)
let vars = query.split("&").map( pair => pair.split("=") )
const match = vars.find( pair => pair[0] == variable )
return match ? match[1] : undefined
}
export const getQueries = queries => {
const matches = {}
for( let query of queries ) {
matches[query] = getQuery( query )
}
return matches
}
export const setQuery = query => {
const { protocol, host, pathname } = window.location
window.history.pushState( { dummy: 'data'}, "", `${protocol}//${host}${pathname}${ query ? `?${query}` : `` }`);
}

website/src/ (subpage .pug example, exact filename not shown)

@@ -0,0 +1,45 @@
//- --------------------------------
//- HiPew settings
//- --------------------------------
//- This is page-specific metadata
-
const page = {
title: 'Subpage', desc: 'Live demo of the Hi Pew high performance static website boilerplate.', url: '/',
published: '2020-02-11', featuredimg: '/assets/robert-katzki-jbtfM0XBeRc-unsplash.jpg'
}
//- This is a file with helpers
include ../pug/_helpers.pug
//- --------------------------------
//- Webpage source code
//- --------------------------------
doctype html
html( lang=content.lang )
include ../pug/_header.pug
body#index
main
section#hero
h1 👋 🔫 HI PEW
h2 Subpage example
a( target='_blank' href='https://developers.google.com/speed/pagespeed/insights/?url=https://actuallymentor.github.io/hi-pew/' ) View Pagespeed Score
+rimg.stretch( src=site.system.url+'/assets/marc-olivier-jodoin-NqOInJ-ttqM-unsplash.jpg' )
section#usps
div.usp
p 🚀
p 100% Google Pagespeed score
div.usp
p 👓
p Easy to configure SEO best-practices
div.usp
p 🧪
p Advanced configuration options for geeks
section#cta
+cimg.circle#compressed( size=120 src=site.system.url+'/assets/robert-katzki-jbtfM0XBeRc-unsplash.jpg' )
a( href='https://github.com/actuallymentor/hi-pew/' ) 👨‍💻 View README on Github
include ../pug/_footer.pug

website/src/pug/_footer.pug Normal file

@@ -0,0 +1,50 @@
footer#footer
div © Copyright #{site.system.year} #{site.author.firstname} #{site.author.lastname}
div( style='margin-top: 20px' ) Made with
a( href='https://github.com/actuallymentor/hi-pew/' ) hi-pew
script( type="application/ld+json" ).
{
"@context": "http://schema.org",
"@type": "Article",
"mainEntityOfPage": {
"@type": "WebPage",
"@id": "#{page.url}"
},
"headline": "#{page.title}",
"image": {
"@type": "ImageObject",
"url": "#{site.system.url}#{page.featuredimg}",
"height": "696px",
"width": "696px"
},
"datePublished": "#{page.published}",
"dateModified": "#{page.updated}",
"author": {
"@type": "Person",
"name": "#{site.author.firstname} #{site.author.lastname}",
"url": "#{site.author.url}"
},
"publisher": {
"@type": "Organization",
"name": "#{site.identity.title}",
"url": "#{site.system.url}",
"logo": {
"@type": "ImageObject",
"url": "#{site.identity.logo}",
"width": "60px",
"height": "600px"
}
},
"description": "#{page.desc}"
}
    //- Google tag manager
if site.track.gtm
script.
(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start':
new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0],
j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.async=true;j.src=
'https://www.googletagmanager.com/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f);
})(window,document,'script','dataLayer', '!{ site.track.gtm }');
noscript.
<iframe src='https://www.googletagmanager.com/ns.html?id=!{site.track.gtm}' height="0" width="0" style="display:none;visibility:hidden"></iframe>

website/src/pug/_header.pug Normal file

@@ -0,0 +1,55 @@
head
meta( charset='utf-8' )
meta( http-equiv="x-ua-compatible" content="ie=edge" )
meta( name="viewport" content="width=device-width, initial-scale=1" )
base( href=site.system.url )
meta( name="robots" content="index,follow,noodp" )
meta( name="googlebot" content="index,follow" )
if site.track.gverification
meta( name="google-site-verification" content=site.track.gverification )
//- Regular meta tags
title !{site.identity.title} - #{page.title}
meta( name="description" content=site.identity.desc )
//- Facebook tags
meta( property="og:site_name" content=site.identity.title )
meta( property="og:title" content=site.identity.title + ' - ' + page.title )
meta( property="og:description" content=page.desc )
meta( property="article:published_time" content=page.published)
meta( property="og:type" content="article" )
meta( property="og:url" content=page.url )
meta( property="og:image" content=page.featuredimg)
meta( property="fb:admins" content=site.author.facebook )
meta( property="profile:first_name" content=site.author.firstname)
meta( property="profile:last_name" content=site.author.lastname)
//- Twitter tags
meta( name="twitter:site" content=site.author.twitter )
meta( name="twitter:title" content=site.identity.title + ' - ' + page.title )
meta( name="twitter:description" content=page.desc)
meta( name="twitter:image" content=page.featuredimg)
meta( name="name=twitter:image:alt" content=page.title )
meta( name="twitter:card" content=page.desc )
//- Load essential css
style( type="text/css" charset="utf-8" )!=css
//- Load the main css stylesheet if there is no js support
noscript: link( rel="stylesheet" type="text/css" href=site.system.url + 'assets/css/styles.css?t=' + site.system.timestamp )
//- Load the main css stylesheet if there is js support
//- Async loading of css and fonts
script( type="application/javascript" ).
const loadAsync = ( url, mime ) => {
let link = document.createElement( "link" )
link.href = url
link.type = mime || ""
link.rel = "stylesheet"
document.getElementsByTagName( "head" )[0].appendChild( link )
}
loadAsync( "#{ site.system.url + 'assets/css/styles.css?t=' + site.system.timestamp }", "text/css" )
// loadAsync( "https://fonts.googleapis.com/css?family=Roboto:300&display=swap", "" )
//- Get the webpack script
script( src=site.system.url + `assets/js/app-${site.system.timestamp}.js` , async)

website/src/pug/_helpers.pug Normal file

@@ -0,0 +1,63 @@
//- Responsive image tag
mixin rimg( )
picture.rimg( class!=attributes.class )
//- Shared definitions
-
const [ full, path, filename, dot, extension ] = attributes.src.match( /(.*\/)(.*)(\.)(.*)/ )
const sizes = site.system.images.sizes.reduce( ( acc, size ) => {
return acc += `( max-width: ${ size }px ) ${ size }px, `
},'' )
//- Source tag for avif
-
const avifsrcset = site.system.images.sizes.reduce( ( acc, size ) => {
return acc += `${ path }${ filename }-${ size }.avif ${ size }w, `
},'' )
source( type="image/avif" srcset=avifsrcset class!=attributes.class )
//- Source tag for webp
-
const webpsrcset = site.system.images.sizes.reduce( ( acc, size ) => {
return acc += `${ path }${ filename }-${ size }.webp ${ size }w, `
},'' )
source( type="image/webp" srcset=webpsrcset class!=attributes.class )
//- Img tag for jpg
-
const srcset = site.system.images.sizes.reduce( ( acc, size ) => {
return acc += `${ path }${ filename }-${ size }.jpg ${ size }w, `
},'' )
img( src=attributes.src srcset=srcset sizes=sizes alt=attributes.alt class!=attributes.class )
//- Compressed image tag
mixin cimg( )
picture.cimg( class!=attributes.class id!=attributes.id )
//- Shared definitions
-
const [ full, path, filename, dot, extension ] = attributes.src.match( /(.*\/)(.*)(\.)(.*)/ )
const sortedSizes = site.system.images.sizes.sort( ( a, b ) => {
if( a > b ) return -1
if( a < b ) return 1
else return 0
} )
            let closestSize = sortedSizes.find( availablesize => availablesize <= attributes.size )
            // Fall back to the smallest generated size if none fits
            if( !closestSize ) closestSize = sortedSizes[ sortedSizes.length - 1 ]
const closestSrc = `${ path }${ filename }-${ closestSize }`
source( type="image/avif" src=closestSrc + '.avif' class!=attributes.class )
source( type="image/webp" src=closestSrc + '.webp' class!=attributes.class )
img( src=closestSrc + '.jpg' alt=attributes.alt class!=attributes.class )

website/test/links.js Normal file

@@ -0,0 +1,25 @@
import expect from './modules/expect'
import { promises as fs } from 'fs'
import { urls, isBroken, getLinks } from './modules/linkman'
describe( 'Links in the source files', function( ) {
this.timeout( 1000 * 60 * 10 )
it( 'are all valid', async function() {
let linksWithFile = await getLinks( `${__dirname}/../src` )
if( process.env.verbose ) console.log( 'Validating ', linksWithFile.length, ' links' )
const broken = await Promise.all( linksWithFile.map( link => isBroken( link ) ) )
const filtered = broken.filter( notfalse => notfalse )
if( process.env.verbose && filtered.length > 0 ) await fs.writeFile( `${__dirname}/../broken-links.json`, JSON.stringify( filtered, null, 2 ) )
if( process.env.verbose && filtered.length > 0 ) console.log( filtered.length , ' links are broken' )
return filtered.should.have.length( 0 )
} )
} )

website/test/modules/expect.js Normal file

@@ -0,0 +1,7 @@
import chai from 'chai'
import aspromised from 'chai-as-promised'
chai.use( aspromised )
chai.should()
export default chai.expect

website/test/modules/linkman.js Normal file

@@ -0,0 +1,77 @@
import dir from 'recursive-readdir'
import { normalize } from 'path'
import { promises as fs } from 'fs'
import request from 'request-promise-native'
// Fallback request
const get = async link => {
// If it has a protocol:
if( !link.url.match( /^\/\// ) ) return request( {
uri: link.url,
resolveWithFullResponse: true,
headers: {
'User-Agent': 'Chrome/79.0.3945.117'
}
} )
// If it has no protocol
    console.log( `No protocol found, trying https:${ link.url }` )
// Try https
const https = await request( {
uri: `https:${ link.url }`,
resolveWithFullResponse: true,
headers: {
'User-Agent': 'Chrome/79.0.3945.117'
}
} ).catch( e => false )
if( https ) return https
    console.log( 'HTTPS did not respond, trying HTTP' )
// Otherwise try http
const http = await request( {
uri: `http:${ link.url }`,
resolveWithFullResponse: true,
headers: {
'User-Agent': 'Chrome/79.0.3945.117'
}
} ).catch( e => false )
if( http ) return http
// If neither worked..
return { message: `Link has no protocol and doesn't respond on http or https` }
}
// Match all hrefs that have a // (external)
export const urls = str => Array.from( str.matchAll( /(?:href=(?:'|"))(.*?\/\/.*?)(?:"|')/g ), m => m[1] )
// Check if a url is broken
export const isBroken = link => get( link )
.then( ( { statusCode } ) => statusCode == 200 ? false : { ...link, code: statusCode } )
.catch( ( { statusCode, name, message, ...other } ) => ( { ...link, code: statusCode || name || message || other } ) )
// Get links with files
export const getLinks = async path => {
// Get the paths to files
const paths = await dir( path, [ '*.png', '*.jpg', '*.pdf', '*.gif' ] )
    // Read the contents of each file so it can be scanned for links
const files = await Promise.all( paths.map( async path => ( {
path: normalize( path ), content: await fs.readFile( path, 'utf8' )
} ) ) )
const linksByFile = files.map( md => ( {
path: md.path,
urls: urls( md.content )
} ) )
let linksWithFile = linksByFile.map( file => {
return file.urls.map( url => ( { url: url, path: file.path } ) )
} ).flat()
return linksWithFile
}

website/webpack.config.js Normal file

@@ -0,0 +1,123 @@
// Browser sync stuff
const BrowserSyncPlugin = require( 'browser-sync-webpack-plugin' )
const bs = require( 'browser-sync' )
// Webpack and css
const webpack = require( 'webpack' )
// Workflow
const { watch } = require( 'fs' )
const { css } = require( __dirname + '/modules/publish-css' )
// Site config
const site = require( __dirname + '/modules/config' )
// Conversions
const publishpug = require( __dirname + '/modules/publish-pug' )
const publishassets = require( __dirname + '/modules/publish-assets' )
// Get environment variables
const dotenv = require( 'dotenv' )
const { NODE_ENV } = process.env
const dev = NODE_ENV == 'development'
// Helpers
const error = e => {
console.log( "\007" ) // Beep
console.error( "\x1b[31m", `\n 🛑 error: `, e && e.message || e, "\x1b[0m" )
}
// ///////////////////////////////
// Plugins
// ///////////////////////////////
let thebs
const servername = 'bsserver'
const bsconfig = {
host: 'localhost',
open: true,
cors: true,
port: 3000,
server: {
baseDir: [ site.system.public ],
serveStaticOptions: {
extensions: ['html']
}
},
notify: false
}
const bsyncplugconfig = {
name: servername,
callback: f => { thebs = bs.get( servername ) }
}
const envPlugin = new webpack.DefinePlugin( {
    process: {
        env: {
            // Stringify each value separately: spreading JSON.stringify( parsed ) would
            // spread the resulting string character by character instead of spreading keys
            ...Object.fromEntries( Object.entries( dotenv.config().parsed || {} ).map( ( [ key, value ] ) => [ key, JSON.stringify( value ) ] ) ),
            NODE_ENV: JSON.stringify( process.env.NODE_ENV )
        }
    }
} )
// ///////////////////////////////
// Watchers for non webpack files
// ///////////////////////////////
// Watch pug/sass
if ( dev ) watch( site.system.source, { recursive: true }, async ( eventType, filename ) => {
// Pug file was updated
if( filename.includes( 'pug' ) ) await publishpug( site, filename ).catch( error )
// Sass file was updated, rebuild sass and pug files
else if ( filename.includes( 'sass' ) || filename.includes( 'scss' ) ) {
if( filename.includes( 'essential-above-the-fold' ) ) await publishpug( site ).catch( error )
await css( site ).catch( error )
}
// Reload browser after every change
    if( thebs ) thebs.reload()
} )
// Watch asset folder
if ( dev ) watch( `${ site.system.source }/assets`, { recursive: true }, async ( eventType, filename ) => {
// Republish assets
await publishassets( site, filename ).catch( error )
// Reload browser after every change
    if( thebs ) thebs.reload()
} )
module.exports = async f => {
await Promise.all( [ publishpug( site ), publishassets( site ), css( site ) ] )
return {
entry: site.system.source + 'js/main.js',
mode: NODE_ENV,
output: {
filename: `app-${site.system.timestamp}.js`,
path: `${site.system.public}assets/js/`
},
module: {
rules: [
{
test: /\.js$/,
exclude: /node_modules/,
use: {
loader: 'babel-loader'
}
}
]
},
devtool: NODE_ENV == 'production' ? false : 'inline-source-map',
plugins: NODE_ENV == 'production' ? [ envPlugin ] : [ envPlugin, new BrowserSyncPlugin( bsconfig, bsyncplugconfig ) ]
}
}