--- /dev/null
+# This is a basic workflow that is manually triggered
+
+name: Manual Doc build workflow
+
+# Controls when the action will run. Workflow runs when manually triggered using the UI
+# or API.
+on:
+ workflow_dispatch:
+ # Inputs the workflow accepts.
+
+
+# A workflow run is made up of one or more jobs that can run sequentially or in parallel
+jobs:
+ # This workflow contains a single job called "build_blog_site"
+ build_blog_site:
+ # The type of runner that the job will run on
+ runs-on: [ ubuntu-22.04-self-hosted ]
+# runs-on: [ self-hosted ]
+
+ # Steps represent a sequence of tasks that will be executed as part of the job
+ steps:
+ # Runs a single command using the runners shell
+ - name: Show auth
+ run: gcloud auth list
+ - name: store secrets file
+ run: |
+ cat << EOF > /tmp/key.json
+ ${{ secrets.SERVICE_ACCOUNT_KEY }}
+ EOF
+ - name: activate service account
+ run: gcloud auth activate-service-account --key-file=/tmp/key.json
+ - name: Show auth
+ run: gcloud auth list
+ - name: Trigger doc build
+ run: gcloud run jobs execute pu-cdn-build-doc-job-production --region europe-north1 --wait
+ - name: Remove service account
+ run: gcloud auth revoke
+ - name: Remove access key file
+ run: rm -f /tmp/key.json
--- /dev/null
+###
+name: Automatic web build
+
+# Controls when the action will run. Workflow runs on pushes to the
+# master or automation branches.
+on:
+ push:
+ branches:
+ - master
+ - automation
+jobs:
+ # This workflow contains a single job called "build_web_site"
+ build_web_site:
+ # The type of runner that the job will run on
+ runs-on: [ ubuntu-22.04-self-hosted ]
+
+ # Steps represent a sequence of tasks that will be executed as part of the job
+ steps:
+ # Runs a single command using the runners shell
+ - name: Show available accounts
+ run: gcloud auth list
+ - name: Store service account key file
+ run: |
+ cat << EOF > /tmp/key.json
+ ${{ secrets.SERVICE_ACCOUNT_KEY }}
+ EOF
+ - name: Activate service account
+ run: gcloud auth activate-service-account --key-file=/tmp/key.json
+ - name: Show available accounts
+ run: gcloud auth list
+ - name: Trigger web build
+ run: gcloud run jobs execute pu-cdn-build-web-job-production --region europe-north1 --wait
+ - name: Remove service account
+ run: gcloud auth revoke
+ - name: Remove service account key file
+ run: rm -f /tmp/key.json
+
parser = OptionParser()
parser.add_option("-b", "--base", help="major version to filter on", dest="base")
-parser.add_option("-i", "--inputdirectory", help="directory of json files", dest="directory")
+parser.add_option(
+ "-i", "--inputdirectory", help="directory of json files", dest="directory"
+)
(options, args) = parser.parse_args()
-def natural_sort_key(s, _nsre=re.compile('([0-9]+)')):
- return [int(text) if text.isdigit() else text.lower()
- for text in _nsre.split(s)]
+def natural_sort_key(s, _nsre=re.compile("([0-9]+)")):
+ return [int(text) if text.isdigit() else text.lower() for text in _nsre.split(s)]
def getbasefor(fixedin):
- dotparts = re.search('^(\d)\.(\d)\.(\d)', fixedin)
+ dotparts = re.search("^(\d)\.(\d)\.(\d)", fixedin)
if not dotparts:
return None
if int(dotparts.group(1)) < 3:
print("Ignoring due to error parsing: " + options.directory + x)
continue
-# Filter on version
+# Filter on version
# We want to sort on reverse date then cve name
statements = ""
disputedcve = {}
for cve in cves:
if "statements" in cve:
for statement in cve["statements"]:
- if (statement["base"] in (options.base or "none")):
+ if statement["base"] in (options.base or "none"):
statements += "<p>" + statement["text"].strip() + "</p>"
if "disputed" in cve:
for dispute in cve["disputed"]:
fixedbase = getbasefor(fixedin)
if fixedbase and fixedbase not in allbase:
allbase.append(fixedbase)
- if (fixedin.startswith(base)):
+ if fixedin.startswith(base):
datepublic = cna["datePublic"] + "-" + cveid
entries[datepublic] = cve
allyears = []
allissues = ""
for k, cve in sorted(entries.items(), reverse=True):
- year = k.split('-')[0]
+ year = k.split("-")[0]
- if (lastyear != year):
- if (lastyear != ""):
- allissues += "</dl>";
- allissues += "<h3><a name=\"y%s\">%s</a></h3>\n<dl>" % (year, year)
+ if lastyear != year:
+ if lastyear != "":
+ allissues += "</dl>"
+ allissues += '<h3><a name="y%s">%s</a></h3>\n<dl>' % (year, year)
allyears.append(year)
lastyear = year
allissues += "<dt>"
# CVE name
if cve:
- allissues += "<a href=\"https://www.cve.org/CVERecord?id=%s\" name=\"%s\">%s</a> " % (cveid, cveid, cveid)
+ allissues += (
+ '<a href="https://www.cve.org/CVERecord?id=%s" name="%s">%s</a> '
+ % (cveid, cveid, cveid)
+ )
- # Advisory (use the title instead of openssl advisory)
+ # Advisory (use the title instead of openssl advisory)
title = "(OpenSSL Advisory)"
refs = ""
if "title" in cna:
- title = cna['title']
+ title = cna["title"]
refs = title
for ref in cna["references"]:
if "tags" in ref:
url = ref["url"]
if url.startswith("https://www.openssl.org/news/"):
url = url.replace("https://www.openssl.org/news/", "")
- refs = "<a href=\"" + url + "\">" + title + "</a>"
+ refs = '<a href="' + url + '">' + title + "</a>"
allissues += " " + refs
# Impact
impact = metric["other"]["content"]["text"]
if not "unknown" in impact:
metric_url = metric["other"]["type"]
- if metric["other"]["type"].startswith("https://www.openssl.org/policies/"):
+ if metric["other"]["type"].startswith(
+ "https://www.openssl.org/policies/"
+ ):
metric_url = metric_url.replace("https://www.openssl.org/", "../")
- allissues += f" <a href=\"{metric_url}\">[{impact} severity]</a>"
+ allissues += f' <a href="{metric_url}">[{impact} severity]</a>'
# Date
datepublic = cna["datePublic"]
- t = datetime.datetime(int(datepublic[:4]), int(datepublic[5:7]), int(datepublic[8:10]), 0, 0)
+ t = datetime.datetime(
+ int(datepublic[:4]), int(datepublic[5:7]), int(datepublic[8:10]), 0, 0
+ )
allissues += t.strftime(" %d %B %Y: ")
- allissues += "<a href=\"#toc\"><img src=\"../img/up.gif\"/></a></dt>\n<dd>"
+ allissues += '<a href="#toc"><img src="../img/up.gif"/></a></dt>\n<dd>'
# Description
for desc in cna["descriptions"]:
# Trailing \n's are ignored, double \n are paragraph breaks
- allissues += desc["value"].rstrip('\n').replace('\n\n', "</dd><dd>")
+ allissues += desc["value"].rstrip("\n").replace("\n\n", "</dd><dd>")
# Credits
- if ("credits" in cna):
+ if "credits" in cna:
allissues += "</dd><dd>"
for credit in cna["credits"]:
creditprefix = " Found by "
if "type" in credit and "remediation dev" in credit["type"]:
creditprefix = " Fix developed by "
- elif "type" in credit and ("finder" not in credit["type"] and "reporter" not in credit["type"]):
+ elif "type" in credit and (
+ "finder" not in credit["type"] and "reporter" not in credit["type"]
+ ):
creditprefix = " Thanks to "
allissues += creditprefix + credit["value"] + "."
affects = []
product = cna["affected"][0]
- productname = product['product']
+ productname = product["product"]
allissues += "<ul>"
also = []
for ver in product["versions"]:
git = reference["url"]
if base:
- if (not earliest.startswith(base)):
- also.append("OpenSSL <a href=\"vulnerabilities-%s.html#%s\">%s</a>" % (getbasefor(earliest), cveid, fixedin))
+ if not earliest.startswith(base):
+ also.append(
+ 'OpenSSL <a href="vulnerabilities-%s.html#%s">%s</a>'
+ % (getbasefor(earliest), cveid, fixedin)
+ )
continue
- allissues += "<li>Affects %s up to and including OpenSSL %s " % (earliest, lastaffected)
- if (git != ""):
- issue = git.split('/')[-1]
+ allissues += "<li>Affects %s up to and including OpenSSL %s " % (
+ earliest,
+ lastaffected,
+ )
+ if git != "":
+ issue = git.split("/")[-1]
# it will process this url https://git.openssl.org/gitweb/?p=openssl.git;a=commitdiff;h=517a0e7fa0f5453c860a3aec17b678bd55d5aad7
# and rewrite it to https://github.com/openssl/openssl/commit/517a0e7fa0f5453c860a3aec17b678bd55d5aad7
if git.startswith("https://git.openssl.org/"):
commitId = git.split(";")[-1].split("=")[-1]
git = f"https://github.com/openssl/openssl/commit/{commitId}"
- allissues += "<a href=\"%s\">(fix in git commit)</a> " % (git)
+ allissues += '<a href="%s">(fix in git commit)</a> ' % (git)
allissues += "</li>"
if "lessThan" in ver:
fixedin = ver["lessThan"]
git = reference["url"]
if base:
- if (not earliest.startswith(base)):
- also.append("OpenSSL <a href=\"vulnerabilities-%s.html#%s\">%s</a>" % (getbasefor(earliest), cveid, fixedin))
+ if not earliest.startswith(base):
+ also.append(
+ 'OpenSSL <a href="vulnerabilities-%s.html#%s">%s</a>'
+ % (getbasefor(earliest), cveid, fixedin)
+ )
continue
allissues += "<li>Fixed in OpenSSL %s " % (fixedin)
- if (git != ""):
+ if git != "":
if git.startswith("https://git.openssl.org/"):
commitId = git.split(";")[-1].split("=")[-1]
git = f"https://github.com/openssl/openssl/commit/{commitId}"
- if (fixedin.startswith("1.0.2") and fixedin[5] >= 'w'): # 1.0.2w and above hack
- allissues += "<a href=\"/support/contracts.html?giturl=%s\">(premium support)</a> " % (git)
+ if (
+ fixedin.startswith("1.0.2") and fixedin[5] >= "w"
+ ): # 1.0.2w and above hack
+ allissues += (
+ '<a href="/support/contracts.html?giturl=%s">(premium support)</a> '
+ % (git)
+ )
else:
- allissues += "<a href=\"%s\">(git commit)</a> " % (git)
+ allissues += '<a href="%s">(git commit)</a> ' % (git)
allissues += "(Affected since " + earliest + ")"
allissues += "</li>"
if also:
preface = "<!-- do not edit this file it is autogenerated, edit vulnerabilities.xml -->"
bases = []
for base in allbase:
- if (options.base and base in options.base):
+ if options.base and base in options.base:
bases.append("%s" % (base))
else:
- bases.append("<a href=\"vulnerabilities-%s.html\">%s</a>" % (base, base))
+ bases.append('<a href="vulnerabilities-%s.html">%s</a>' % (base, base))
preface += "<p>Show issues fixed only in OpenSSL " + ", ".join(bases)
if options.base:
- preface += ", or <a href=\"vulnerabilities.html\">all versions</a></p>"
+ preface += ', or <a href="vulnerabilities.html">all versions</a></p>'
preface += "<h2>Fixed in OpenSSL %s</h2>" % (options.base)
else:
preface += "</p>"
preface += statements
-if len(allyears) > 1: # If only vulns in this year no need for the year table of contents
- preface += "<p><a name=\"toc\">Jump to year: </a>" + ", ".join("<a href=\"#y%s\">%s</a>" % (year, year) for year in allyears)
+if len(allyears) > 1:
+ # If only vulns in this year no need for the year table of contents
+ preface += '<p><a name="toc">Jump to year: </a>' + ", ".join(
+ '<a href="#y%s">%s</a>' % (year, year) for year in allyears
+ )
preface += "</p>"
if allissues != "":
preface += allissues + "</dl>"
nonissues = ""
for nonissue in disputedcve:
- if (not options.base or disputedcve[nonissue]["base"] in (options.base or "none")):
- nonissues += "<li><a href=\"https://www.cve.org/CVERecord?id=%s\" name=\"%s\">%s</a>: " % (nonissue, nonissue, nonissue)
+ if not options.base or disputedcve[nonissue]["base"] in (options.base or "none"):
+ nonissues += (
+ '<li><a href="https://www.cve.org/CVERecord?id=%s" name="%s">%s</a>: '
+ % (nonissue, nonissue, nonissue)
+ )
nonissues += disputedcve[nonissue]["text"]
nonissues += "</li>"
-if (nonissues != ""):
+if nonissues != "":
preface += "<h3>Not Vulnerabilities</h3><ul>" + nonissues + "</ul>"
-sys.stdout.reconfigure(encoding='utf-8')
+sys.stdout.reconfigure(encoding="utf-8")
sys.stdout.write(preface)
#! /bin/bash
+. ./bin/utils.sh
+
HERE=$(
- cd $(dirname $0)
- pwd
+ cd "$(dirname_custom $0)" || exit
+ pwd
)
THIS=$(basename $0)
# Standard getopt calling sequence
if ! TEMP=$(getopt -o "$shortopts" --long "$longopts" -n $THIS -- "$@"); then
- echo >&2 "$usage"
- exit 1
+ echo >&2 "$usage"
+ exit 1
fi
eval set -- "$TEMP"
unset TEMP
# Check the parsed options
while true; do
- case "$1" in
- '-d' | '--dir')
- dir="$2"
- shift 2
- continue
- ;;
- '-i' | '--input')
- input="$2"
- shift 2
- continue
- ;;
- '-o' | '--output')
- output="$2"
- shift 2
- continue
- ;;
- '-h' | '--help')
- echo >&2 "$usage"
- exit 0
- ;;
- '--')
- shift
- break
- ;;
- *)
- echo >&2 'Internal error!'
- echo >&2 "$usage"
- exit 1
- ;;
- esac
+ case "$1" in
+ '-d' | '--dir')
+ dir="$2"
+ shift 2
+ continue
+ ;;
+ '-i' | '--input')
+ input="$2"
+ shift 2
+ continue
+ ;;
+ '-o' | '--output')
+ output="$2"
+ shift 2
+ continue
+ ;;
+ '-h' | '--help')
+ echo >&2 "$usage"
+ exit 0
+ ;;
+ '--')
+ shift
+ break
+ ;;
+ *)
+ echo >&2 'Internal error!'
+ echo >&2 "$usage"
+ exit 1
+ ;;
+ esac
done
tpagecmd="tpage"
while true; do
- case "$1" in
- *=*)
- tpagecmd="$tpagecmd --define '$1'"
- shift
- ;;
- *)
- break
- ;;
- esac
+ case "$1" in
+ *=*)
+ tpagecmd="$tpagecmd --define '$1'"
+ shift
+ ;;
+ *)
+ break
+ ;;
+ esac
done
# If there are no other arguments, read from stdin, write to stdout.
# Otherwise, read from the input files and write to corresponding output files.
if [ $# -eq 0 ]; then
- if [ -z "$dir" ]; then
- echo >&2 'Directory must be set with -d / --dir in this mode'
- exit 1
- fi
- (
- cd $dir || exit
- (
- cat $HERE/../inc/common.tt
- if [ -n "$input" ]; then cat "$HERE/../$input"; else cat; fi
- ) |
- eval "$tpagecmd --define 'dir=$dir'" |
- (if [ -n "$output" ]; then cat >"$HERE/../$output"; else cat; fi)
- )
+ if [ -z "$dir" ]; then
+ echo >&2 'Directory must be set with -d / --dir in this mode'
+ exit 1
+ fi
+ (
+ cd $dir || exit
+ (
+ cat $HERE/../inc/common.tt
+ if [ -n "$input" ]; then cat "$HERE/../$input"; else cat; fi
+ ) |
+ eval "$tpagecmd --define 'dir=$dir'" |
+ (if [ -n "$output" ]; then cat >"$HERE/../$output"; else cat; fi)
+ )
else
- errfiles=
- nofiles=
- for f in "$@"; do
- base_with_ext="${f##*/}"
- base=${base_with_ext%.tt}
- # base=$(basename "$f" .tt)
+ errfiles=
+ nofiles=
+ for f in "$@"; do
+ base_with_ext="${f##*/}"
+ base=${base_with_ext%.tt}
+ # base=$(basename "$f" .tt)
- if [ "$base" = "$f" ]; then
- errfiles="$errfiles '$f'"
- elif [ ! -f "$f" ]; then
- nofiles="$nofiles '$f'"
- fi
- done
- if [ -n "$errfiles" ]; then
- echo >&2 "Files not ending with .tt:$errfiles"
- fi
- if [ -n "$nofiles" ]; then
- echo >&2 "Files no present:$nofiles"
- fi
- if [ -n "$errfiles" -o -n "$nofiles" ]; then
- exit 1
- fi
+ if [ "$base" = "$f" ]; then
+ errfiles="$errfiles '$f'"
+ elif [ ! -f "$f" ]; then
+ nofiles="$nofiles '$f'"
+ fi
+ done
+ if [ -n "$errfiles" ]; then
+ echo >&2 "Files not ending with .tt:$errfiles"
+ fi
+ if [ -n "$nofiles" ]; then
+ echo >&2 "Files no present:$nofiles"
+ fi
+ if [ -n "$errfiles" ] || [ -n "$nofiles" ]; then
+ exit 1
+ fi
- for f in "$@"; do
- base_with_ext="${f##*/}"
- base=${base_with_ext%.tt}
- filedir=$(dirname "$f")
+ for f in "$@"; do
+ base_with_ext="${f##*/}"
+ base=${base_with_ext%.tt}
+ filedir=$(dirname "$f")
- if [ "$f" != "$base" ]; then
- if ! (
- cd $filedir
- (
- cat $HERE/../inc/common.tt
- cat $base.tt
- ) |
- eval "$tpagecmd --define 'dir=${dir:-$filedir}'" \
- >$base
- ); then
- exit $?
- fi
- fi
- done
+ if [ "$f" != "$base" ]; then
+ if ! (
+ cd $filedir
+ (
+ cat $HERE/../inc/common.tt
+ cat $base.tt
+ ) |
+ eval "$tpagecmd --define 'dir=${dir:-$filedir}'" \
+ >$base
+ ); then
+ exit $?
+ fi
+ fi
+ done
fi
# \brief debug output
function _job_pool_echo() {
- if [[ ${job_pool_echo_command} == "1" ]]; then
- echo $@
- fi
+ if [[ ${job_pool_echo_command} == "1" ]]; then
+ echo $@
+ fi
}
# \brief cleans up
function _job_pool_cleanup() {
- rm -f ${job_pool_job_queue} ${job_pool_result_log}
+ rm -f ${job_pool_job_queue} ${job_pool_result_log}
}
# \brief signal handler
function _job_pool_exit_handler() {
- _job_pool_stop_workers
- _job_pool_cleanup
+ _job_pool_stop_workers
+ _job_pool_cleanup
}
# \brief print the exit codes for each command
# \param[in] result_log the file where the exit codes are written to
function _job_pool_print_result_log() {
- job_pool_nerrors=$(grep ^ERROR "${job_pool_result_log}" | wc -l)
- cat "${job_pool_result_log}" | sed -e 's/^ERROR//'
+ job_pool_nerrors=$(grep ^ERROR "${job_pool_result_log}" | wc -l)
+ cat "${job_pool_result_log}" | sed -e 's/^ERROR//'
}
# \brief the worker function that is called when we fork off worker processes
# \param[in] job_queue the fifo to read jobs from
# \param[in] result_log the temporary log file to write exit codes to
function _job_pool_worker() {
- local id=$1
- local job_queue=$2
- local result_log=$3
- local cmd=
- local args=
-
- exec 7<>${job_queue}
- while [[ ${cmd} != "${job_pool_end_of_jobs}" && -e ${job_queue} ]]; do
- # workers block on the exclusive lock to read the job queue
- flock --exclusive 7
- IFS=$'\v'
- read cmd args <${job_queue}
- set -- ${args}
- unset IFS
- flock --unlock 7
- # the worker should exit if it sees the end-of-job marker or run the
- # job otherwise and save its exit code to the result log.
- if [[ ${cmd} == "${job_pool_end_of_jobs}" ]]; then
- # write it one more time for the next sibling so that everyone
- # will know we are exiting.
- echo "${cmd}" >&7
- else
- _job_pool_echo "### _job_pool_worker-${id}: ${cmd}"
- # run the job
- { ${cmd} "$@"; }
- # now check the exit code and prepend "ERROR" to the result log entry
- # which we will use to count errors and then strip out later.
- local result=$?
- local status=
- if [[ ${result} != "0" ]]; then
- status=ERROR
- fi
- # now write the error to the log, making sure multiple processes
- # don't trample over each other.
- exec 8<>${result_log}
- flock --exclusive 8
- _job_pool_echo "${status}job_pool: exited ${result}: ${cmd} $@" >>${result_log}
- flock --unlock 8
- exec 8>&-
- _job_pool_echo "### _job_pool_worker-${id}: exited ${result}: ${cmd} $@"
- fi
- done
- exec 7>&-
+ local id=$1
+ local job_queue=$2
+ local result_log=$3
+ local cmd=
+ local args=
+
+ exec 7<>${job_queue}
+ while [[ ${cmd} != "${job_pool_end_of_jobs}" && -e ${job_queue} ]]; do
+ # workers block on the exclusive lock to read the job queue
+ flock --exclusive 7
+ IFS=$'\v'
+ read cmd args <${job_queue}
+ set -- ${args}
+ unset IFS
+ flock --unlock 7
+ # the worker should exit if it sees the end-of-job marker or run the
+ # job otherwise and save its exit code to the result log.
+ if [[ ${cmd} == "${job_pool_end_of_jobs}" ]]; then
+ # write it one more time for the next sibling so that everyone
+ # will know we are exiting.
+ echo "${cmd}" >&7
+ else
+ _job_pool_echo "### _job_pool_worker-${id}: ${cmd}"
+ # run the job
+ { ${cmd} "$@"; }
+ # now check the exit code and prepend "ERROR" to the result log entry
+ # which we will use to count errors and then strip out later.
+ local result=$?
+ local status=
+ if [[ ${result} != "0" ]]; then
+ status=ERROR
+ fi
+ # now write the error to the log, making sure multiple processes
+ # don't trample over each other.
+ exec 8<>${result_log}
+ flock --exclusive 8
+ _job_pool_echo "${status}job_pool: exited ${result}: ${cmd} $@" >>${result_log}
+ flock --unlock 8
+ exec 8>&-
+ _job_pool_echo "### _job_pool_worker-${id}: exited ${result}: ${cmd} $@"
+ fi
+ done
+ exec 7>&-
}
# \brief sends message to worker processes to stop
function _job_pool_stop_workers() {
- # send message to workers to exit, and wait for them to stop before
- # doing cleanup.
- echo ${job_pool_end_of_jobs} >>${job_pool_job_queue}
- wait
+ # send message to workers to exit, and wait for them to stop before
+ # doing cleanup.
+ echo ${job_pool_end_of_jobs} >>${job_pool_job_queue}
+ wait
}
# \brief fork off the workers
# \param[in] job_queue the fifo used to send jobs to the workers
# \param[in] result_log the temporary log file to write exit codes to
function _job_pool_start_workers() {
- local job_queue=$1
- local result_log=$2
- for ((i = 0; i < job_pool_pool_size; i++)); do
- _job_pool_worker ${i} ${job_queue} ${result_log} &
- done
+ local job_queue=$1
+ local result_log=$2
+ for ((i = 0; i < job_pool_pool_size; i++)); do
+ _job_pool_worker ${i} ${job_queue} ${result_log} &
+ done
}
################################################################################
# \param[in] pool_size number of parallel jobs allowed
# \param[in] echo_command 1 to turn on echo, 0 to turn off
function job_pool_init() {
- local pool_size=$1
- local echo_command=$2
+ local pool_size=$1
+ local echo_command=$2
- # set the global attibutes
- job_pool_pool_size=${pool_size:=1}
- job_pool_echo_command=${echo_command:=0}
+ # set the global attributes
+ job_pool_pool_size=${pool_size:=1}
+ job_pool_echo_command=${echo_command:=0}
- # create the fifo job queue and create the exit code log
- rm -rf ${job_pool_job_queue} ${job_pool_result_log}
- mkfifo ${job_pool_job_queue}
- touch ${job_pool_result_log}
+ # create the fifo job queue and create the exit code log
+ rm -rf ${job_pool_job_queue} ${job_pool_result_log}
+ mkfifo ${job_pool_job_queue}
+ touch ${job_pool_result_log}
- # fork off the workers
- _job_pool_start_workers ${job_pool_job_queue} ${job_pool_result_log}
+ # fork off the workers
+ _job_pool_start_workers ${job_pool_job_queue} ${job_pool_result_log}
}
# \brief waits for all queued up jobs to complete and shuts down the job pool
function job_pool_shutdown() {
- _job_pool_stop_workers
- _job_pool_print_result_log
- _job_pool_cleanup
+ _job_pool_stop_workers
+ _job_pool_print_result_log
+ _job_pool_cleanup
}
# \brief run a job in the job pool
function job_pool_run() {
- if [[ ${job_pool_pool_size} == "-1" ]]; then
- job_pool_init
- fi
- printf "%s\v" "$@" >>${job_pool_job_queue}
- echo >>${job_pool_job_queue}
+ if [[ ${job_pool_pool_size} == "-1" ]]; then
+ job_pool_init
+ fi
+ printf "%s\v" "$@" >>${job_pool_job_queue}
+ echo >>${job_pool_job_queue}
}
# \brief waits for all queued up jobs to complete before starting new jobs
# This function actually fakes a wait by telling the workers to exit
# when done with the jobs and then restarting them.
function job_pool_wait() {
- _job_pool_stop_workers
- _job_pool_start_workers ${job_pool_job_queue} ${job_pool_result_log}
+ _job_pool_stop_workers
+ _job_pool_start_workers ${job_pool_job_queue} ${job_pool_result_log}
}
#########################################
# End of Job Pool
. ./bin/utils.sh
HERE=$(
- cd $(dirname_custom $0)
- pwd
+ cd "$(dirname_custom $0)" || exit
+ pwd
)
THIS=$(basename_custom $0)
# Standard getopt calling sequence
if ! TEMP=$(getopt -o "$shortopts" --long "$longopts" -n $THIS -- "$@"); then
- echo >&2 "$usage"
- exit 1
+ echo >&2 "$usage"
+ exit 1
fi
eval set -- "$TEMP"
unset TEMP
# Check the parsed options
while true; do
- case "$1" in
- '-o' | '--output')
- output="$2"
- shift 2
- ;;
- '-i' | '--index')
- index=1
- shift
- ;;
- '-h' | '--help')
- echo >&2 "$usage"
- exit 0
- ;;
- '--')
- shift
- break
- ;;
- *)
- echo >&2 'Internal error!'
- echo >&2 "$usage"
- exit 1
- ;;
- esac
+ case "$1" in
+ '-o' | '--output')
+ output="$2"
+ shift 2
+ ;;
+ '-i' | '--index')
+ index=1
+ shift
+ ;;
+ '-h' | '--help')
+ echo >&2 "$usage"
+ exit 0
+ ;;
+ '--')
+ shift
+ break
+ ;;
+ *)
+ echo >&2 'Internal error!'
+ echo >&2 "$usage"
+ exit 1
+ ;;
+ esac
done
######################################################################
# --output. Otherwise, read from the input files and write to corresponding
# output files.
if [ $# -eq 0 ]; then
- if [ -z "$output" ]; then
- echo >&2 'Output path must be set with -o / --output in this mode'
- exit 1
- fi
+ if [ -z "$output" ]; then
+ echo >&2 'Output path must be set with -o / --output in this mode'
+ exit 1
+ fi
- if [ "$(basename_custom "$output" .html)" = "$output" ]; then
- echo >&2 'Output path must end with .html'
- exit 1
- fi
+ if [ "$(basename_custom "$output" .html)" = "$output" ]; then
+ echo >&2 'Output path must end with .html'
+ exit 1
+ fi
- # Set '-' to mean stdin / stdout
- set -- -
+ # Set '-' to mean stdin / stdout
+ set -- -
elif [ -n "$output" ]; then
- echo >&2 '-o / --output is confusing in this mode'
- exit 1
+ echo >&2 '-o / --output is confusing in this mode'
+ exit 1
fi
# Check that all the arguments are existing and correctly named files
errfiles=
nofiles=
for f in "$@"; do
- [ "$f" = "-" ] && continue
+ [ "$f" = "-" ] && continue
- base=$(basename_custom "$f" md)
+ base=$(basename_custom "$f" md)
- if [ "$base" = "$f" ]; then
- errfiles="$errfiles '$f'"
- elif [ ! -f "$f" ]; then
- nofiles="$nofiles '$f'"
- fi
+ if [ "$base" = "$f" ]; then
+ errfiles="$errfiles '$f'"
+ elif [ ! -f "$f" ]; then
+ nofiles="$nofiles '$f'"
+ fi
done
if [ -n "$errfiles" ]; then
- echo >&2 "Files not ending with .md:$errfiles"
+ echo >&2 "Files not ending with .md:$errfiles"
fi
if [ -n "$nofiles" ]; then
- echo >&2 "Files no present:$nofiles"
+ echo >&2 "Files no present:$nofiles"
fi
-if [ -n "$errfiles" -o -n "$nofiles" ]; then
- exit 1
+if [ -n "$errfiles" ] || [ -n "$nofiles" ]; then
+ exit 1
fi
title_prefix=""
for f in "$@"; do
- if [ "$f" != "-" ]; then
- base=$(basename_custom "$f" md)
- dir=$(dirname "$f")
- input=$(grep "breadcrumb: " < $f);
- prefix=${input#"breadcrumb: "}
- if [ ! -z "$prefix" ]
- then
- title_prefix="[ ${prefix} ] - "
+ if [ "$f" != "-" ]; then
+ base=$(basename_custom "$f" md)
+ dir=$(dirname "$f")
+ input=$(grep "breadcrumb: " <$f)
+ prefix=${input#"breadcrumb: "}
+ if [ ! -z "$prefix" ]; then
+ title_prefix="[ ${prefix} ] - "
+ fi
+
+ if [ "$f" = "$base" ]; then
+ continue
+ fi
+ else
+ base=$(basename_custom "$output" html)
+ dir=$(dirname "$output")
+ fi
+
+ if [ "$dir" = "." ] || [ "$dir" = "" ]; then
+ title="/$base.html"
+ top=""
+ else
+ title="$title_prefix/$dir/$base.html"
+ top=$(echo "$dir" | sed -E -e 's|[^/]+|..|g')/
fi
- if [ "$f" = "$base" ]; then
- continue
+ # is it an index file?
+ def_isindex=
+ if [ -n "$index" ] || [ "$base" = "index" ]; then
+ def_isindex="-M is-index=true"
fi
- else
- base=$(basename_custom "$output" html)
- dir=$(dirname "$output")
- fi
-
- if [ "$dir" = "." -o "$dir" = "" ]; then
- title="/$base.html"
- top=""
- else
- title="$title_prefix/$dir/$base.html"
- top=$(echo "$dir" | sed -E -e 's|[^/]+|..|g')/
- fi
-
- # is it an index file?
- def_isindex=
- if [ -n "$index" -o "$base" = "index" ]; then
- def_isindex="-M is-index=true"
- fi
-
- # metadata
- meta_file=""
- input_file=""
- meta="$dir/dirdata.yaml"
- if [ -f "$meta" ]; then
- meta_file="$HERE/../$meta"
- fi
-
- if [ "$f" = "-" ]; then
- input_file="-"
- else
- input_file="$HERE/../$f"
- fi
-
- pandoc -t html5 -f markdown --template="$template" \
- --highlight-style="$highlightstyle" \
- --tab-stop=8 --shift-heading-level-by=1 \
- -M author-meta='OpenSSL Foundation, Inc.' \
- -M lang=en \
- -M pagetitle="$title" \
- $def_isindex -M top="$top" -o "$dir/$base.html" $meta_file $input_file
+
+ # metadata
+ meta_file=""
+ input_file=""
+ meta="$dir/dirdata.yaml"
+ if [ -f "$meta" ]; then
+ meta_file="$HERE/../$meta"
+ fi
+
+ if [ "$f" = "-" ]; then
+ input_file="-"
+ else
+ input_file="$HERE/../$f"
+ fi
+
+ pandoc -t html5 -f markdown --template="$template" \
+ --highlight-style="$highlightstyle" \
+ --tab-stop=8 --shift-heading-level-by=1 \
+ -M author-meta='OpenSSL Foundation, Inc.' \
+ -M lang=en \
+ -M pagetitle="$title" \
+ $def_isindex -M top="$top" -o "$dir/$base.html" $meta_file $input_file
done
cd $dir || exit
for m in $(find . -name '*.md.tt' | sort); do
-# description=$(grep '^OSSL-description:' $m | sed -e 's|^[^:]*: *||' -e 's|^ *"||' -e 's|" *||')
+ # description=$(grep '^OSSL-description:' $m | sed -e 's|^[^:]*: *||' -e 's|^ *"||' -e 's|" *||')
description=$(get_description $m)
# If there isn't a description, it isn't a manpage and should not be
# included
if [ "$description" = "" ]; then
- continue
+ continue
fi
manfile=$(basename_custom $m)
manname=$(basename_custom $manfile md.tt)
#!/bin/bash
-mkdir checkouts
-if [ ! -d "checkouts/data" ]; then
- gh repo clone git@github.openssl.org:omc/data.git checkouts/data -- --depth 1
-else
- cd checkouts/data
- echo -n "checkouts/data - "
- git pull
- cd ../..
-fi
-if [ ! -d "checkouts/general-policies" ]; then
- gh repo clone https://github.com/openssl/general-policies.git checkouts/general-policies -- --depth=1
-else
- cd checkouts/general-policies
- echo -n "checkouts/general-policies - "
- git pull
- cd ../..
-fi
-if [ ! -d "checkouts/technical-policies" ]; then
- gh repo clone https://github.com/openssl/technical-policies.git checkouts/technical-policies -- --depth=1
-else
- cd checkouts/technical-policies
- echo -n "checkouts/technical-policies - "
- git pull
- cd ../..
-fi
-if [ ! -d "checkouts/openssl" ]; then
- gh repo clone https://github.com/openssl/openssl.git checkouts/openssl -- --depth=1 --branch master
-else
- cd checkouts/openssl
- echo -n "checkouts/openssl - "
- git pull
- cd ../..
-fi
-if [ ! -d "checkouts/openssl-3.2" ]; then
- gh repo clone https://github.com/openssl/openssl.git checkouts/openssl-3.2 -- --depth=1 --branch openssl-3.2
-else
- cd checkouts/openssl-3.2
- echo -n "checkouts/openssl-3.2 - "
- git pull
- cd ../..
-fi
-if [ ! -d "checkouts/openssl-3.1" ]; then
- gh repo clone https://github.com/openssl/openssl.git checkouts/openssl-3.1 -- --depth=1 --branch openssl-3.1
-else
- cd checkouts/openssl-3.1
- echo -n "checkouts/openssl-3.1 - "
- git pull
- cd ../..
-fi
-if [ ! -d "checkouts/openssl-3.0" ]; then
- gh repo clone https://github.com/openssl/openssl.git checkouts/openssl-3.0 -- --depth=1 --branch openssl-3.0
-else
- cd checkouts/openssl-3.0
- echo -n "checkouts/openssl-3.0 - "
- git pull
- cd ../..
-fi
-if [ ! -d "checkouts/openssl-1.1.1-stable" ]; then
- gh repo clone https://github.com/openssl/openssl.git checkouts/openssl-1.1.1-stable -- --depth=1 --branch OpenSSL_1_1_1-stable
-else
- cd checkouts/openssl-1.1.1-stable
- echo -n "checkouts/openssl-1.1.1-stable - "
- git pull
- cd ../..
-fi
-if [ ! -d "checkouts/openssl-1.0.2-stable" ]; then
- gh repo clone https://github.com/openssl/openssl.git checkouts/openssl-1.0.2-stable -- --depth=1 --branch OpenSSL_1_0_2-stable
-else
- cd checkouts/openssl-1.0.2-stable
- echo -n "checkouts/openssl-1.0.2-stable - "
- git pull
- cd ../..
-fi
+mkdir -p checkouts
+declare -A repo
+repo["data"]="https://github.openssl.org/omc/data.git"
+repo["general-policies"]="https://github.com/openssl/general-policies.git"
+repo["technical-policies"]="https://github.com/openssl/technical-policies.git"
+repo["openssl"]="https://github.com/openssl/openssl.git master"
+repo["openssl-3.2"]="https://github.com/openssl/openssl.git openssl-3.2"
+repo["openssl-3.1"]="https://github.com/openssl/openssl.git openssl-3.1"
+repo["openssl-3.0"]="https://github.com/openssl/openssl.git openssl-3.0"
+repo["openssl-1.1.1-stable"]="https://github.com/openssl/openssl.git OpenSSL_1_1_1-stable"
+repo["openssl-1.0.2-stable"]="https://github.com/openssl/openssl.git OpenSSL_1_0_2-stable"
+
+for folder_key in "${!repo[@]}"; do
+ declare val
+ read -a "val" <<<"${repo[$folder_key]}"
+ REPO_URL=${val[0]}
+ REPO_BRANCH=${val[1]}
+ echo "Processing repository $REPO_URL and storing to: checkouts/$folder_key"
+ branch_cmd=''
+ if [ ! -z $REPO_BRANCH ]; then
+ branch_cmd="--branch $REPO_BRANCH"
+ fi
+ if [ ! -d checkouts/$folder_key ]; then
+ echo "Cloning repository to folder $folder_key: "
+ gh repo clone $REPO_URL checkouts/$folder_key -- --depth=1 $branch_cmd
+ else
+ cd checkouts/$folder_key || exit
+ echo -n "Already exists so updating checkouts/$folder_key - "
+ git pull
+ cd ../..
+ fi
+done
releases="$*"
-log () {
+log() {
echo >&2 "$@"
}
echo >&2 -n "$r : "
for s in 1 3 5 7; do
- echo >&2 -n $s
- for f in docs/man$r/man$s/*.md.tt; do
- $debug -n '.'
- existsin=''
-#
- b=$(basename_custom $f md.tt)
+ echo >&2 -n $s
+ for f in docs/man$r/man$s/*.md.tt; do
+ $debug -n '.'
+ existsin=''
+ #
+ b=$(basename_custom $f md.tt)
- for x in $releases; do
- if [ "$x" = "$r" ]; then
- continue
- fi
- if [ -f "docs/man$x/man$s/$b.md.tt" ]; then
- existsin="$existsin $x"
- fi
- done
- (
- if [ "$existsin" != "" ]; then
- echo "sidebar: |"
- echo " # This manpage"
- for x in $existsin; do
- echo " - [$x version]([% top %]docs/man$x/man$s/$b.html)"
- done
- fi
- ) > "docs/man$r/man$s/$b.cross"
- done
+ for x in $releases; do
+ if [ "$x" = "$r" ]; then
+ continue
+ fi
+ if [ -f "docs/man$x/man$s/$b.md.tt" ]; then
+ existsin="$existsin $x"
+ fi
+ done
+ (
+ if [ "$existsin" != "" ]; then
+ echo "sidebar: |"
+ echo " # This manpage"
+ for x in $existsin; do
+ echo " - [$x version]([% top %]docs/man$x/man$s/$b.html)"
+ done
+ fi
+ ) >"docs/man$r/man$s/$b.cross"
+ done
done
echo >&2
done
if [ "$F" != "$Dn/$Fn" ]; then
# HTML file, which we treat specially
job_pool_run process_file $F $HERE $series $Dn $Fn $srcdir $destdir
- # G=$Dn/$Fn.inc
- # echo "strip-man-html < $srcdir/$F > $destdir/$G"
- # $HERE/strip-man-html < $srcdir/$F > $destdir/$G
-
- # section=$(basename $Dn | sed -e 's|^man||')
- # description="$($HERE/all-html-man-names < $destdir/$G | sed -e 's|^.* - ||' -e 's|\&|\\\&|g')"
- # names="$($HERE/all-html-man-names < $destdir/$G | sed -e 's| - .*||' -e 's|, *| |g' -e 's|/|-|g')"
- # for name in $names; do
- # G=$Dn/$name.md.tt
- # cat $HERE/../inc/manpage-template.mdtt \
- # | sed -E \
- # -e "s|\\\$release\\\$|$series|g" \
- # -e "s|\\\$sectnum\\\$|$section|g" \
- # -e "s|\\\$description\\\$|$description|g" \
- # -e "s|\\\$name\\\$|$name|g" \
- # -e "s|\\\$origname\\\$|$Fn|g" \
- # > $destdir/$G
- # done
else
# Other file types, such as images. We simply copy those
G=$Dn/$Fn
my $locale = $query->find_person_tag($email, 'country');
my $pgpid = $query->find_person_tag($email, 'pgp');
$data{$name} = { email => $email, locale => $locale, pgpid => $pgpid,
- active => !!($groupname !~ m|-inactive$|),
- emeritus => !!($groupname =~ m|-emeritus$|) };
+ active => !!($groupname !~ m|-inactive$|),
+ emeritus => !!($groupname =~ m|-emeritus$|) };
}
}
print "<table summary=\"$options{title}\">\n";
print " <tr>\n";
print join(" <th> </th>\n",
- map {" <th>$_</th>\n"} @columns);
+ map {" <th>$_</th>\n"} @columns);
print " </tr>\n";
foreach my $key (sort {mk_sortable($a) cmp mk_sortable($b)} keys %data) {
#!/bin/bash
# returns basename of filepath and removes extension if specified
function basename_custom {
- if [[ -n $1 ]]; then
- filepath=$1
- file_basename="${filepath##*/}"
- if [[ -n $2 ]]; then
- file_basename="${file_basename%.$2}"
- fi
- echo $file_basename
- else
- exit 1
- fi
+ if [[ -n $1 ]]; then
+ filepath=$1
+ file_basename="${filepath##*/}"
+ if [[ -n $2 ]]; then
+ file_basename="${file_basename%.$2}"
+ fi
+ echo "$file_basename"
+ else
+ exit 1
+ fi
}
# returns dirname from filepath
function dirname_custom {
- if [[ -n $1 ]]; then
- filepath=$1
- echo ${filepath%/*}
- fi
+ if [[ -n $1 ]]; then
+ filepath=$1
+ echo "${filepath%/*}"
+ fi
}
# Find line matching TEXT parse string and return it
function get_description {
- FILE="$1"
- while read line; do
+ FILE="$1"
+ while read -r line; do
- if [[ $line =~ ^OSSL-description: ]]; then
- line=${line#*'"'}
- line=${line%'"'*}
- echo "$line"
- break
- fi
- done <"$FILE"
+ if [[ $line =~ ^OSSL-description: ]]; then
+ line=${line#*'"'}
+ line=${line%'"'*}
+ echo "$line"
+ break
+ fi
+ done <"$FILE"
}
# get_descriptiom ../docs/man3.2/man3/BIO_get_rpoll_descriptor.md.tt
codebase.
All fixed security bugs are listed on our
-[vulnerabilities page](../news/vulnerabilities.html).
+[vulnerabilities page](/news/vulnerabilities.html).
## <a name="bugs">Reporting Bugs</a>
TLS connections that are in non-FIPS mode simultaneously.
For more up to date information related to 3.0 please follow the links
-at [www.openssl.org/docs](../docs/)
+at [www.openssl.org/docs](/docs/index.html)
## Terms Used in This Document {#terms-used-in-this-document}
This FIPS module is validated, and has the certificate number [\#4282].
The certificate page includes a link to the Security Policy.
-[the FIPS module manual page]: man3.0/man7/fips_module.html
+[the FIPS module manual page]: /docs/man3.0/man7/fips_module.html
[\#4282]: https://csrc.nist.gov/projects/cryptographic-module-validation-program/certificate/4282
+---
+breadcrumb: Home
+---
+
# Welcome to OpenSSL!
The OpenSSL Project develops and maintains the OpenSSL software - a robust,
[CVE-2021-4044] | 3.0.1 | no |
| | | | **Release of 3.0.0 FIPS provider**
-[CVE-2023-4807]: vulnerabilities.html#CVE-2023-4807
-[CVE-2023-3817]: vulnerabilities.html#CVE-2023-3817
-[CVE-2023-3446]: vulnerabilities.html#CVE-2023-3446
-[CVE-2023-2975]: vulnerabilities.html#CVE-2023-2975
-[CVE-2023-2650]: vulnerabilities.html#CVE-2023-2650
-[CVE-2023-1255]: vulnerabilities.html#CVE-2023-1255
-[CVE-2023-0466]: vulnerabilities.html#CVE-2023-0466
-[CVE-2023-0465]: vulnerabilities.html#CVE-2023-0465
-[CVE-2023-0464]: vulnerabilities.html#CVE-2023-0464
-[CVE-2023-0401]: vulnerabilities.html#CVE-2023-0401
-[CVE-2023-0286]: vulnerabilities.html#CVE-2023-0286
-[CVE-2023-0217]: vulnerabilities.html#CVE-2023-0217
-[CVE-2023-0216]: vulnerabilities.html#CVE-2023-0216
-[CVE-2023-0215]: vulnerabilities.html#CVE-2023-0215
-[CVE-2022-4450]: vulnerabilities.html#CVE-2022-4450
-[CVE-2022-4304]: vulnerabilities.html#CVE-2022-4304
-[CVE-2022-4203]: vulnerabilities.html#CVE-2022-4203
-[CVE-2022-3996]: vulnerabilities.html#CVE-2022-3996
-[CVE-2022-3786]: vulnerabilities.html#CVE-2022-3786
-[CVE-2022-3602]: vulnerabilities.html#CVE-2022-3602
-[CVE-2022-3358]: vulnerabilities.html#CVE-2022-3358
-[CVE-2022-2274]: vulnerabilities.html#CVE-2022-2274
-[CVE-2022-2097]: vulnerabilities.html#CVE-2022-2097
-[CVE-2022-2068]: vulnerabilities.html#CVE-2022-2068
-[CVE-2022-1473]: vulnerabilities.html#CVE-2022-1473
-[CVE-2022-1434]: vulnerabilities.html#CVE-2022-1434
-[CVE-2022-1343]: vulnerabilities.html#CVE-2022-1343
-[CVE-2022-1292]: vulnerabilities.html#CVE-2022-1292
-[CVE-2022-0778]: vulnerabilities.html#CVE-2022-0778
-[CVE-2021-4160]: vulnerabilities.html#CVE-2021-4160
-[CVE-2021-4044]: vulnerabilities.html#CVE-2021-4044
+[CVE-2023-4807]: /news/vulnerabilities.html#CVE-2023-4807
+[CVE-2023-3817]: /news/vulnerabilities.html#CVE-2023-3817
+[CVE-2023-3446]: /news/vulnerabilities.html#CVE-2023-3446
+[CVE-2023-2975]: /news/vulnerabilities.html#CVE-2023-2975
+[CVE-2023-2650]: /news/vulnerabilities.html#CVE-2023-2650
+[CVE-2023-1255]: /news/vulnerabilities.html#CVE-2023-1255
+[CVE-2023-0466]: /news/vulnerabilities.html#CVE-2023-0466
+[CVE-2023-0465]: /news/vulnerabilities.html#CVE-2023-0465
+[CVE-2023-0464]: /news/vulnerabilities.html#CVE-2023-0464
+[CVE-2023-0401]: /news/vulnerabilities.html#CVE-2023-0401
+[CVE-2023-0286]: /news/vulnerabilities.html#CVE-2023-0286
+[CVE-2023-0217]: /news/vulnerabilities.html#CVE-2023-0217
+[CVE-2023-0216]: /news/vulnerabilities.html#CVE-2023-0216
+[CVE-2023-0215]: /news/vulnerabilities.html#CVE-2023-0215
+[CVE-2022-4450]: /news/vulnerabilities.html#CVE-2022-4450
+[CVE-2022-4304]: /news/vulnerabilities.html#CVE-2022-4304
+[CVE-2022-4203]: /news/vulnerabilities.html#CVE-2022-4203
+[CVE-2022-3996]: /news/vulnerabilities.html#CVE-2022-3996
+[CVE-2022-3786]: /news/vulnerabilities.html#CVE-2022-3786
+[CVE-2022-3602]: /news/vulnerabilities.html#CVE-2022-3602
+[CVE-2022-3358]: /news/vulnerabilities.html#CVE-2022-3358
+[CVE-2022-2274]: /news/vulnerabilities.html#CVE-2022-2274
+[CVE-2022-2097]: /news/vulnerabilities.html#CVE-2022-2097
+[CVE-2022-2068]: /news/vulnerabilities.html#CVE-2022-2068
+[CVE-2022-1473]: /news/vulnerabilities.html#CVE-2022-1473
+[CVE-2022-1434]: /news/vulnerabilities.html#CVE-2022-1434
+[CVE-2022-1343]: /news/vulnerabilities.html#CVE-2022-1343
+[CVE-2022-1292]: /news/vulnerabilities.html#CVE-2022-1292
+[CVE-2022-0778]: /news/vulnerabilities.html#CVE-2022-0778
+[CVE-2021-4160]: /news/vulnerabilities.html#CVE-2021-4160
+[CVE-2021-4044]: /news/vulnerabilities.html#CVE-2021-4044
#### FIPS
- OpenSSL 3.0 FIPS Provider has had its FIPS 140-2 validation certificate issued.
-See the [blog post](blog/blog/2022/08/24/FIPS-validation-certificate-issued/)
+See the [blog post](/blog/blog/2022/08/24/FIPS-validation-certificate-issued/)
- The OpenSSL 3.1 release will be about FIPS 140-3 validation submission.
-See the [blog post](blog/blog/2022/09/30/fips-140-3/)
+See the [blog post](/blog/blog/2022/09/30/fips-140-3/)
#### Post-quantum cryptography