* Update macaron
* Various wiki bug fixes

tags/v1.21.12.1
| @@ -0,0 +1 @@ | |||
| ref: refs/heads/master | |||
| @@ -0,0 +1,4 @@ | |||
| [core] | |||
| repositoryformatversion = 0 | |||
| filemode = true | |||
| bare = true | |||
| @@ -0,0 +1 @@ | |||
| Unnamed repository; edit this file 'description' to name the repository. | |||
| @@ -0,0 +1,15 @@ | |||
| #!/bin/sh | |||
| # | |||
| # An example hook script to check the commit log message taken by | |||
| # applypatch from an e-mail message. | |||
| # | |||
| # The hook should exit with non-zero status after issuing an | |||
| # appropriate message if it wants to stop the commit. The hook is | |||
| # allowed to edit the commit message file. | |||
| # | |||
| # To enable this hook, rename this file to "applypatch-msg". | |||
| . git-sh-setup | |||
| commitmsg="$(git rev-parse --git-path hooks/commit-msg)" | |||
| test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} | |||
| : | |||
| @@ -0,0 +1,24 @@ | |||
| #!/bin/sh | |||
| # | |||
| # An example hook script to check the commit log message. | |||
| # Called by "git commit" with one argument, the name of the file | |||
| # that has the commit message. The hook should exit with non-zero | |||
| # status after issuing an appropriate message if it wants to stop the | |||
| # commit. The hook is allowed to edit the commit message file. | |||
| # | |||
| # To enable this hook, rename this file to "commit-msg". | |||
| # Uncomment the below to add a Signed-off-by line to the message. | |||
| # Doing this in a hook is a bad idea in general, but the prepare-commit-msg | |||
| # hook is more suited to it. | |||
| # | |||
| # SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') | |||
| # grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" | |||
| # This example catches duplicate Signed-off-by lines. | |||
| test "" = "$(grep '^Signed-off-by: ' "$1" | | |||
| sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { | |||
| echo >&2 Duplicate Signed-off-by lines. | |||
| exit 1 | |||
| } | |||
| @@ -0,0 +1,8 @@ | |||
| #!/bin/sh | |||
| # | |||
| # An example hook script to prepare a packed repository for use over | |||
| # dumb transports. | |||
| # | |||
| # To enable this hook, rename this file to "post-update". | |||
| exec git update-server-info | |||
| @@ -0,0 +1,14 @@ | |||
| #!/bin/sh | |||
| # | |||
| # An example hook script to verify what is about to be committed | |||
| # by applypatch from an e-mail message. | |||
| # | |||
| # The hook should exit with non-zero status after issuing an | |||
| # appropriate message if it wants to stop the commit. | |||
| # | |||
| # To enable this hook, rename this file to "pre-applypatch". | |||
| . git-sh-setup | |||
| precommit="$(git rev-parse --git-path hooks/pre-commit)" | |||
| test -x "$precommit" && exec "$precommit" ${1+"$@"} | |||
| : | |||
| @@ -0,0 +1,49 @@ | |||
| #!/bin/sh | |||
| # | |||
| # An example hook script to verify what is about to be committed. | |||
| # Called by "git commit" with no arguments. The hook should | |||
| # exit with non-zero status after issuing an appropriate message if | |||
| # it wants to stop the commit. | |||
| # | |||
| # To enable this hook, rename this file to "pre-commit". | |||
| if git rev-parse --verify HEAD >/dev/null 2>&1 | |||
| then | |||
| against=HEAD | |||
| else | |||
| # Initial commit: diff against an empty tree object | |||
| against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 | |||
| fi | |||
| # If you want to allow non-ASCII filenames set this variable to true. | |||
| allownonascii=$(git config --bool hooks.allownonascii) | |||
| # Redirect output to stderr. | |||
| exec 1>&2 | |||
| # Cross platform projects tend to avoid non-ASCII filenames; prevent | |||
| # them from being added to the repository. We exploit the fact that the | |||
| # printable range starts at the space character and ends with tilde. | |||
| if [ "$allownonascii" != "true" ] && | |||
| # Note that the use of brackets around a tr range is ok here, (it's | |||
| # even required, for portability to Solaris 10's /usr/bin/tr), since | |||
| # the square bracket bytes happen to fall in the designated range. | |||
| test $(git diff --cached --name-only --diff-filter=A -z $against | | |||
| LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 | |||
| then | |||
| cat <<\EOF | |||
| Error: Attempt to add a non-ASCII file name. | |||
| This can cause problems if you want to work with people on other platforms. | |||
| To be portable it is advisable to rename the file. | |||
| If you know what you are doing you can disable this check using: | |||
| git config hooks.allownonascii true | |||
| EOF | |||
| exit 1 | |||
| fi | |||
| # If there are whitespace errors, print the offending file names and fail. | |||
| exec git diff-index --check --cached $against -- | |||
| @@ -0,0 +1,53 @@ | |||
| #!/bin/sh | |||
| # An example hook script to verify what is about to be pushed. Called by "git | |||
| # push" after it has checked the remote status, but before anything has been | |||
| # pushed. If this script exits with a non-zero status nothing will be pushed. | |||
| # | |||
| # This hook is called with the following parameters: | |||
| # | |||
| # $1 -- Name of the remote to which the push is being done | |||
| # $2 -- URL to which the push is being done | |||
| # | |||
| # If pushing without using a named remote those arguments will be equal. | |||
| # | |||
| # Information about the commits which are being pushed is supplied as lines to | |||
| # the standard input in the form: | |||
| # | |||
| # <local ref> <local sha1> <remote ref> <remote sha1> | |||
| # | |||
| # This sample shows how to prevent push of commits where the log message starts | |||
| # with "WIP" (work in progress). | |||
| remote="$1" | |||
| url="$2" | |||
| z40=0000000000000000000000000000000000000000 | |||
| while read local_ref local_sha remote_ref remote_sha | |||
| do | |||
| if [ "$local_sha" = $z40 ] | |||
| then | |||
| # Handle delete | |||
| : | |||
| else | |||
| if [ "$remote_sha" = $z40 ] | |||
| then | |||
| # New branch, examine all commits | |||
| range="$local_sha" | |||
| else | |||
| # Update to existing branch, examine new commits | |||
| range="$remote_sha..$local_sha" | |||
| fi | |||
| # Check for WIP commit | |||
| commit=`git rev-list -n 1 --grep '^WIP' "$range"` | |||
| if [ -n "$commit" ] | |||
| then | |||
| echo >&2 "Found WIP commit in $local_ref, not pushing" | |||
| exit 1 | |||
| fi | |||
| fi | |||
| done | |||
| exit 0 | |||
| @@ -0,0 +1,169 @@ | |||
| #!/bin/sh | |||
| # | |||
| # Copyright (c) 2006, 2008 Junio C Hamano | |||
| # | |||
| # The "pre-rebase" hook is run just before "git rebase" starts doing | |||
| # its job, and can prevent the command from running by exiting with | |||
| # non-zero status. | |||
| # | |||
| # The hook is called with the following parameters: | |||
| # | |||
| # $1 -- the upstream the series was forked from. | |||
| # $2 -- the branch being rebased (or empty when rebasing the current branch). | |||
| # | |||
| # This sample shows how to prevent topic branches that are already | |||
| # merged to 'next' branch from getting rebased, because allowing it | |||
| # would result in rebasing already published history. | |||
| publish=next | |||
| basebranch="$1" | |||
| if test "$#" = 2 | |||
| then | |||
| topic="refs/heads/$2" | |||
| else | |||
| topic=`git symbolic-ref HEAD` || | |||
| exit 0 ;# we do not interrupt rebasing detached HEAD | |||
| fi | |||
| case "$topic" in | |||
| refs/heads/??/*) | |||
| ;; | |||
| *) | |||
| exit 0 ;# we do not interrupt others. | |||
| ;; | |||
| esac | |||
| # Now we are dealing with a topic branch being rebased | |||
| # on top of master. Is it OK to rebase it? | |||
| # Does the topic really exist? | |||
| git show-ref -q "$topic" || { | |||
| echo >&2 "No such branch $topic" | |||
| exit 1 | |||
| } | |||
| # Is topic fully merged to master? | |||
| not_in_master=`git rev-list --pretty=oneline ^master "$topic"` | |||
| if test -z "$not_in_master" | |||
| then | |||
| echo >&2 "$topic is fully merged to master; better remove it." | |||
| exit 1 ;# we could allow it, but there is no point. | |||
| fi | |||
| # Is topic ever merged to next? If so you should not be rebasing it. | |||
| only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` | |||
| only_next_2=`git rev-list ^master ${publish} | sort` | |||
| if test "$only_next_1" = "$only_next_2" | |||
| then | |||
| not_in_topic=`git rev-list "^$topic" master` | |||
| if test -z "$not_in_topic" | |||
| then | |||
| echo >&2 "$topic is already up-to-date with master" | |||
| exit 1 ;# we could allow it, but there is no point. | |||
| else | |||
| exit 0 | |||
| fi | |||
| else | |||
| not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` | |||
| /usr/bin/perl -e ' | |||
| my $topic = $ARGV[0]; | |||
| my $msg = "* $topic has commits already merged to public branch:\n"; | |||
| my (%not_in_next) = map { | |||
| /^([0-9a-f]+) /; | |||
| ($1 => 1); | |||
| } split(/\n/, $ARGV[1]); | |||
| for my $elem (map { | |||
| /^([0-9a-f]+) (.*)$/; | |||
| [$1 => $2]; | |||
| } split(/\n/, $ARGV[2])) { | |||
| if (!exists $not_in_next{$elem->[0]}) { | |||
| if ($msg) { | |||
| print STDERR $msg; | |||
| undef $msg; | |||
| } | |||
| print STDERR " $elem->[1]\n"; | |||
| } | |||
| } | |||
| ' "$topic" "$not_in_next" "$not_in_master" | |||
| exit 1 | |||
| fi | |||
| <<\DOC_END | |||
| This sample hook safeguards topic branches that have been | |||
| published from being rewound. | |||
| The workflow assumed here is: | |||
| * Once a topic branch forks from "master", "master" is never | |||
| merged into it again (either directly or indirectly). | |||
| * Once a topic branch is fully cooked and merged into "master", | |||
| it is deleted. If you need to build on top of it to correct | |||
| earlier mistakes, a new topic branch is created by forking at | |||
| the tip of the "master". This is not strictly necessary, but | |||
| it makes it easier to keep your history simple. | |||
| * Whenever you need to test or publish your changes to topic | |||
| branches, merge them into "next" branch. | |||
| The script, being an example, hardcodes the publish branch name | |||
| to be "next", but it is trivial to make it configurable via | |||
| $GIT_DIR/config mechanism. | |||
| With this workflow, you would want to know: | |||
| (1) ... if a topic branch has ever been merged to "next". Young | |||
| topic branches can have stupid mistakes you would rather | |||
| clean up before publishing, and things that have not been | |||
| merged into other branches can be easily rebased without | |||
| affecting other people. But once it is published, you would | |||
| not want to rewind it. | |||
| (2) ... if a topic branch has been fully merged to "master". | |||
| Then you can delete it. More importantly, you should not | |||
| build on top of it -- other people may already want to | |||
| change things related to the topic as patches against your | |||
| "master", so if you need further changes, it is better to | |||
| fork the topic (perhaps with the same name) afresh from the | |||
| tip of "master". | |||
| Let's look at this example: | |||
| o---o---o---o---o---o---o---o---o---o "next" | |||
| / / / / | |||
| / a---a---b A / / | |||
| / / / / | |||
| / / c---c---c---c B / | |||
| / / / \ / | |||
| / / / b---b C \ / | |||
| / / / / \ / | |||
| ---o---o---o---o---o---o---o---o---o---o---o "master" | |||
| A, B and C are topic branches. | |||
| * A has one fix since it was merged up to "next". | |||
| * B has finished. It has been fully merged up to "master" and "next", | |||
| and is ready to be deleted. | |||
| * C has not merged to "next" at all. | |||
| We would want to allow C to be rebased, refuse A, and encourage | |||
| B to be deleted. | |||
| To compute (1): | |||
| git rev-list ^master ^topic next | |||
| git rev-list ^master next | |||
| if these match, topic has not merged in next at all. | |||
| To compute (2): | |||
| git rev-list master..topic | |||
| if this is empty, it is fully merged to "master". | |||
| DOC_END | |||
| @@ -0,0 +1,36 @@ | |||
| #!/bin/sh | |||
| # | |||
| # An example hook script to prepare the commit log message. | |||
| # Called by "git commit" with the name of the file that has the | |||
| # commit message, followed by the description of the commit | |||
| # message's source. The hook's purpose is to edit the commit | |||
| # message file. If the hook fails with a non-zero status, | |||
| # the commit is aborted. | |||
| # | |||
| # To enable this hook, rename this file to "prepare-commit-msg". | |||
| # This hook includes three examples. The first comments out the | |||
| # "Conflicts:" part of a merge commit. | |||
| # | |||
| # The second includes the output of "git diff --name-status -r" | |||
| # into the message, just before the "git status" output. It is | |||
| # commented because it doesn't cope with --amend or with squashed | |||
| # commits. | |||
| # | |||
| # The third example adds a Signed-off-by line to the message, that can | |||
| # still be edited. This is rarely a good idea. | |||
| case "$2,$3" in | |||
| merge,) | |||
| /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; | |||
| # ,|template,) | |||
| # /usr/bin/perl -i.bak -pe ' | |||
| # print "\n" . `git diff --cached --name-status -r` | |||
| # if /^#/ && $first++ == 0' "$1" ;; | |||
| *) ;; | |||
| esac | |||
| # SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') | |||
| # grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" | |||
| @@ -0,0 +1,128 @@ | |||
| #!/bin/sh | |||
| # | |||
| # An example hook script to block unannotated tags from entering. | |||
| # Called by "git receive-pack" with arguments: refname sha1-old sha1-new | |||
| # | |||
| # To enable this hook, rename this file to "update". | |||
| # | |||
| # Config | |||
| # ------ | |||
| # hooks.allowunannotated | |||
| # This boolean sets whether unannotated tags will be allowed into the | |||
| # repository. By default they won't be. | |||
| # hooks.allowdeletetag | |||
| # This boolean sets whether deleting tags will be allowed in the | |||
| # repository. By default they won't be. | |||
| # hooks.allowmodifytag | |||
| # This boolean sets whether a tag may be modified after creation. By default | |||
| # it won't be. | |||
| # hooks.allowdeletebranch | |||
| # This boolean sets whether deleting branches will be allowed in the | |||
| # repository. By default they won't be. | |||
| # hooks.denycreatebranch | |||
| # This boolean sets whether remotely creating branches will be denied | |||
| # in the repository. By default this is allowed. | |||
| # | |||
| # --- Command line | |||
| refname="$1" | |||
| oldrev="$2" | |||
| newrev="$3" | |||
| # --- Safety check | |||
| if [ -z "$GIT_DIR" ]; then | |||
| echo "Don't run this script from the command line." >&2 | |||
| echo " (if you want, you could supply GIT_DIR then run" >&2 | |||
| echo " $0 <ref> <oldrev> <newrev>)" >&2 | |||
| exit 1 | |||
| fi | |||
| if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then | |||
| echo "usage: $0 <ref> <oldrev> <newrev>" >&2 | |||
| exit 1 | |||
| fi | |||
| # --- Config | |||
| allowunannotated=$(git config --bool hooks.allowunannotated) | |||
| allowdeletebranch=$(git config --bool hooks.allowdeletebranch) | |||
| denycreatebranch=$(git config --bool hooks.denycreatebranch) | |||
| allowdeletetag=$(git config --bool hooks.allowdeletetag) | |||
| allowmodifytag=$(git config --bool hooks.allowmodifytag) | |||
| # check for no description | |||
| projectdesc=$(sed -e '1q' "$GIT_DIR/description") | |||
| case "$projectdesc" in | |||
| "Unnamed repository"* | "") | |||
| echo "*** Project description file hasn't been set" >&2 | |||
| exit 1 | |||
| ;; | |||
| esac | |||
| # --- Check types | |||
| # if $newrev is 0000...0000, it's a commit to delete a ref. | |||
| zero="0000000000000000000000000000000000000000" | |||
| if [ "$newrev" = "$zero" ]; then | |||
| newrev_type=delete | |||
| else | |||
| newrev_type=$(git cat-file -t $newrev) | |||
| fi | |||
| case "$refname","$newrev_type" in | |||
| refs/tags/*,commit) | |||
| # un-annotated tag | |||
| short_refname=${refname##refs/tags/} | |||
| if [ "$allowunannotated" != "true" ]; then | |||
| echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 | |||
| echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 | |||
| exit 1 | |||
| fi | |||
| ;; | |||
| refs/tags/*,delete) | |||
| # delete tag | |||
| if [ "$allowdeletetag" != "true" ]; then | |||
| echo "*** Deleting a tag is not allowed in this repository" >&2 | |||
| exit 1 | |||
| fi | |||
| ;; | |||
| refs/tags/*,tag) | |||
| # annotated tag | |||
| if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 | |||
| then | |||
| echo "*** Tag '$refname' already exists." >&2 | |||
| echo "*** Modifying a tag is not allowed in this repository." >&2 | |||
| exit 1 | |||
| fi | |||
| ;; | |||
| refs/heads/*,commit) | |||
| # branch | |||
| if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then | |||
| echo "*** Creating a branch is not allowed in this repository" >&2 | |||
| exit 1 | |||
| fi | |||
| ;; | |||
| refs/heads/*,delete) | |||
| # delete branch | |||
| if [ "$allowdeletebranch" != "true" ]; then | |||
| echo "*** Deleting a branch is not allowed in this repository" >&2 | |||
| exit 1 | |||
| fi | |||
| ;; | |||
| refs/remotes/*,commit) | |||
| # tracking branch | |||
| ;; | |||
| refs/remotes/*,delete) | |||
| # delete tracking branch | |||
| if [ "$allowdeletebranch" != "true" ]; then | |||
| echo "*** Deleting a tracking branch is not allowed in this repository" >&2 | |||
| exit 1 | |||
| fi | |||
| ;; | |||
| *) | |||
| # Anything else (is there anything else?) | |||
| echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 | |||
| exit 1 | |||
| ;; | |||
| esac | |||
| # --- Finished | |||
| exit 0 | |||
| @@ -0,0 +1,6 @@ | |||
| # git ls-files --others --exclude-from=.git/info/exclude | |||
| # Lines that start with '#' are comments. | |||
| # For a project mostly in C, the following would be a good set of | |||
| # exclude patterns (uncomment them if you want to use them): | |||
| # *.[oa] | |||
| # *~ | |||
| @@ -0,0 +1 @@ | |||
| 2c54faec6c45d31c1abfaecdab471eac6633738a | |||
| @@ -191,7 +191,7 @@ type ErrWikiAlreadyExist struct { | |||
| Title string | |||
| } | |||
| // IsErrWikiAlreadyExist checks if an error is a ErrWikiAlreadyExist. | |||
| // IsErrWikiAlreadyExist checks if an error is an ErrWikiAlreadyExist. | |||
| func IsErrWikiAlreadyExist(err error) bool { | |||
| _, ok := err.(ErrWikiAlreadyExist) | |||
| return ok | |||
| @@ -201,6 +201,21 @@ func (err ErrWikiAlreadyExist) Error() string { | |||
| return fmt.Sprintf("wiki page already exists [title: %s]", err.Title) | |||
| } | |||
| // ErrWikiReservedName represents a reserved name error. | |||
| type ErrWikiReservedName struct { | |||
| Title string | |||
| } | |||
| // IsErrWikiReservedName checks if an error is an ErrWikiReservedName. | |||
| func IsErrWikiReservedName(err error) bool { | |||
| _, ok := err.(ErrWikiReservedName) | |||
| return ok | |||
| } | |||
| func (err ErrWikiReservedName) Error() string { | |||
| return fmt.Sprintf("wiki title is reserved: %s", err.Title) | |||
| } | |||
| // __________ ___. .__ .__ ____ __. | |||
| // \______ \__ _\_ |__ | | |__| ____ | |/ _|____ ___.__. | |||
| // | ___/ | \ __ \| | | |/ ___\ | <_/ __ < | | | |||
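A brief sketch of how the new ErrWikiReservedName type added above is meant to be consumed. It mirrors the NewWikiPost handler later in this diff; the import path and the wrapping function are assumptions for illustration only:

package wikiexample

import (
	"fmt"

	"code.gitea.io/gitea/models" // assumed import path for the types in this diff
)

// createPage is a hypothetical caller; repo and doer would come from the request context.
func createPage(repo *models.Repository, doer *models.User, name, content, msg string) error {
	err := repo.AddWikiPage(doer, name, content, msg)
	switch {
	case err == nil:
		return nil
	case models.IsErrWikiReservedName(err):
		// e.g. name was "_pages", "_new" or "_edit"; surface repo.wiki.reserved_page
		return fmt.Errorf("wiki title %q is reserved", name)
	case models.IsErrWikiAlreadyExist(err):
		return fmt.Errorf("a wiki page named %q already exists", name)
	default:
		return err
	}
}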
| @@ -61,7 +61,7 @@ func (t *TwoFactor) getEncryptionKey() []byte { | |||
| // SetSecret sets the 2FA secret. | |||
| func (t *TwoFactor) SetSecret(secret string) error { | |||
| secretBytes, err := com.AESEncrypt(t.getEncryptionKey(), []byte(secret)) | |||
| secretBytes, err := com.AESGCMEncrypt(t.getEncryptionKey(), []byte(secret)) | |||
| if err != nil { | |||
| return err | |||
| } | |||
| @@ -75,7 +75,7 @@ func (t *TwoFactor) ValidateTOTP(passcode string) (bool, error) { | |||
| if err != nil { | |||
| return false, err | |||
| } | |||
| secret, err := com.AESDecrypt(t.getEncryptionKey(), decodedStoredSecret) | |||
| secret, err := com.AESGCMDecrypt(t.getEncryptionKey(), decodedStoredSecret) | |||
| if err != nil { | |||
| return false, err | |||
| } | |||
| @@ -22,22 +22,37 @@ import ( | |||
| ) | |||
| var ( | |||
| reservedWikiPaths = []string{"_pages", "_new", "_edit"} | |||
| reservedWikiNames = []string{"_pages", "_new", "_edit"} | |||
| wikiWorkingPool = sync.NewExclusivePool() | |||
| ) | |||
| // ToWikiPageURL formats a string to corresponding wiki URL name. | |||
| func ToWikiPageURL(name string) string { | |||
| // NormalizeWikiName normalizes a wiki name | |||
| func NormalizeWikiName(name string) string { | |||
| return strings.Replace(name, "-", " ", -1) | |||
| } | |||
| // WikiNameToSubURL converts a wiki name to its corresponding sub-URL. | |||
| func WikiNameToSubURL(name string) string { | |||
| return url.QueryEscape(strings.Replace(name, " ", "-", -1)) | |||
| } | |||
| // ToWikiPageName formats a URL back to corresponding wiki page name, | |||
| // and removes leading characters './' to prevent changing files | |||
| // that do not belong to the wiki repository. | |||
| func ToWikiPageName(urlString string) string { | |||
| name, _ := url.QueryUnescape(strings.Replace(urlString, "-", " ", -1)) | |||
| name = strings.Replace(name, "\t", " ", -1) | |||
| return strings.Replace(strings.TrimLeft(name, "./"), "/", " ", -1) | |||
| // WikiNameToFilename converts a wiki name to its corresponding filename. | |||
| func WikiNameToFilename(name string) string { | |||
| name = strings.Replace(name, " ", "-", -1) | |||
| return url.QueryEscape(name) + ".md" | |||
| } | |||
| // WikiFilenameToName converts a wiki filename to its corresponding page name. | |||
| func WikiFilenameToName(filename string) (string, error) { | |||
| if !strings.HasSuffix(filename, ".md") { | |||
| return "", fmt.Errorf("Invalid wiki filename: %s", filename) | |||
| } | |||
| basename := filename[:len(filename)-3] | |||
| unescaped, err := url.QueryUnescape(basename) | |||
| if err != nil { | |||
| return "", err | |||
| } | |||
| return NormalizeWikiName(unescaped), nil | |||
| } | |||
| // WikiCloneLink returns clone URLs of repository wiki. | |||
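As a quick illustration of the new name/filename mapping (a minimal sketch, assuming the helpers above are imported from the models package), a name containing a space and a slash escapes into a filename and round-trips back to its normalized form:

package main

import (
	"fmt"

	"code.gitea.io/gitea/models" // assumed import path for the helpers in this diff
)

func main() {
	name := "name with/slash"
	filename := models.WikiNameToFilename(name) // "name-with%2Fslash.md"
	subURL := models.WikiNameToSubURL(name)     // "name-with%2Fslash"
	back, err := models.WikiFilenameToName(filename)
	fmt.Println(filename, subURL, back, err) // back == "name with/slash", err == nil
}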
| @@ -81,7 +96,7 @@ func (repo *Repository) LocalWikiPath() string { | |||
| } | |||
| // UpdateLocalWiki makes sure the local copy of repository wiki is up-to-date. | |||
| func (repo *Repository) UpdateLocalWiki() error { | |||
| func (repo *Repository) updateLocalWiki() error { | |||
| // Don't pass branch name here because it fails to clone and | |||
| // check out a specific branch when the wiki is an empty repository. | |||
| var branch = "" | |||
| @@ -95,19 +110,19 @@ func discardLocalWikiChanges(localPath string) error { | |||
| return discardLocalRepoBranchChanges(localPath, "master") | |||
| } | |||
| // pathAllowed checks if a wiki path is allowed | |||
| func pathAllowed(path string) error { | |||
| for i := range reservedWikiPaths { | |||
| if path == reservedWikiPaths[i] { | |||
| return ErrWikiAlreadyExist{path} | |||
| // nameAllowed checks if a wiki name is allowed | |||
| func nameAllowed(name string) error { | |||
| for _, reservedName := range reservedWikiNames { | |||
| if name == reservedName { | |||
| return ErrWikiReservedName{name} | |||
| } | |||
| } | |||
| return nil | |||
| } | |||
| // updateWikiPage adds new page to repository wiki. | |||
| func (repo *Repository) updateWikiPage(doer *User, oldWikiPath, wikiPath, content, message string, isNew bool) (err error) { | |||
| if err = pathAllowed(wikiPath); err != nil { | |||
| // updateWikiPage adds a new page to the repository wiki. | |||
| func (repo *Repository) updateWikiPage(doer *User, oldWikiName, newWikiName, content, message string, isNew bool) (err error) { | |||
| if err = nameAllowed(newWikiName); err != nil { | |||
| return err | |||
| } | |||
| @@ -121,23 +136,21 @@ func (repo *Repository) updateWikiPage(doer *User, oldWikiPath, wikiPath, conten | |||
| localPath := repo.LocalWikiPath() | |||
| if err = discardLocalWikiChanges(localPath); err != nil { | |||
| return fmt.Errorf("discardLocalWikiChanges: %v", err) | |||
| } else if err = repo.UpdateLocalWiki(); err != nil { | |||
| } else if err = repo.updateLocalWiki(); err != nil { | |||
| return fmt.Errorf("UpdateLocalWiki: %v", err) | |||
| } | |||
| title := ToWikiPageName(wikiPath) | |||
| filename := path.Join(localPath, wikiPath+".md") | |||
| newWikiPath := path.Join(localPath, WikiNameToFilename(newWikiName)) | |||
| // If this is not a new file, perform an update rather than a create. | |||
| if isNew { | |||
| if com.IsExist(filename) { | |||
| return ErrWikiAlreadyExist{filename} | |||
| if com.IsExist(newWikiPath) { | |||
| return ErrWikiAlreadyExist{newWikiPath} | |||
| } | |||
| } else { | |||
| file := path.Join(localPath, oldWikiPath+".md") | |||
| if err := os.Remove(file); err != nil { | |||
| return fmt.Errorf("Failed to remove %s: %v", file, err) | |||
| oldWikiPath := path.Join(localPath, WikiNameToFilename(oldWikiName)) | |||
| if err := os.Remove(oldWikiPath); err != nil { | |||
| return fmt.Errorf("Failed to remove %s: %v", oldWikiPath, err) | |||
| } | |||
| } | |||
| @@ -146,15 +159,16 @@ func (repo *Repository) updateWikiPage(doer *User, oldWikiPath, wikiPath, conten | |||
| // as a new page operation. | |||
| // So we want to make sure the symlink is removed before writing anything. | |||
| // The new file we created will be in normal text format. | |||
| if err = os.RemoveAll(newWikiPath); err != nil { | |||
| return err | |||
| } | |||
| _ = os.Remove(filename) | |||
| if err = ioutil.WriteFile(filename, []byte(content), 0666); err != nil { | |||
| if err = ioutil.WriteFile(newWikiPath, []byte(content), 0666); err != nil { | |||
| return fmt.Errorf("WriteFile: %v", err) | |||
| } | |||
| if len(message) == 0 { | |||
| message = "Update page '" + title + "'" | |||
| message = "Update page '" + newWikiName + "'" | |||
| } | |||
| if err = git.AddChanges(localPath, true); err != nil { | |||
| return fmt.Errorf("AddChanges: %v", err) | |||
| @@ -174,36 +188,35 @@ func (repo *Repository) updateWikiPage(doer *User, oldWikiPath, wikiPath, conten | |||
| } | |||
| // AddWikiPage adds a new wiki page with a given wikiPath. | |||
| func (repo *Repository) AddWikiPage(doer *User, wikiPath, content, message string) error { | |||
| return repo.updateWikiPage(doer, "", wikiPath, content, message, true) | |||
| func (repo *Repository) AddWikiPage(doer *User, wikiName, content, message string) error { | |||
| return repo.updateWikiPage(doer, "", wikiName, content, message, true) | |||
| } | |||
| // EditWikiPage updates a wiki page identified by its wikiPath, | |||
| // optionally also changing wikiPath. | |||
| func (repo *Repository) EditWikiPage(doer *User, oldWikiPath, wikiPath, content, message string) error { | |||
| return repo.updateWikiPage(doer, oldWikiPath, wikiPath, content, message, false) | |||
| func (repo *Repository) EditWikiPage(doer *User, oldWikiName, newWikiName, content, message string) error { | |||
| return repo.updateWikiPage(doer, oldWikiName, newWikiName, content, message, false) | |||
| } | |||
| // DeleteWikiPage deletes a wiki page identified by its wikiPath. | |||
| func (repo *Repository) DeleteWikiPage(doer *User, wikiPath string) (err error) { | |||
| // DeleteWikiPage deletes a wiki page identified by its path. | |||
| func (repo *Repository) DeleteWikiPage(doer *User, wikiName string) (err error) { | |||
| wikiWorkingPool.CheckIn(com.ToStr(repo.ID)) | |||
| defer wikiWorkingPool.CheckOut(com.ToStr(repo.ID)) | |||
| localPath := repo.LocalWikiPath() | |||
| if err = discardLocalWikiChanges(localPath); err != nil { | |||
| return fmt.Errorf("discardLocalWikiChanges: %v", err) | |||
| } else if err = repo.UpdateLocalWiki(); err != nil { | |||
| } else if err = repo.updateLocalWiki(); err != nil { | |||
| return fmt.Errorf("UpdateLocalWiki: %v", err) | |||
| } | |||
| filename := path.Join(localPath, wikiPath+".md") | |||
| filename := path.Join(localPath, WikiNameToFilename(wikiName)) | |||
| if err := os.Remove(filename); err != nil { | |||
| return fmt.Errorf("Failed to remove %s: %v", filename, err) | |||
| } | |||
| title := ToWikiPageName(wikiPath) | |||
| message := "Delete page '" + title + "'" | |||
| message := "Delete page '" + wikiName + "'" | |||
| if err = git.AddChanges(localPath, true); err != nil { | |||
| return fmt.Errorf("AddChanges: %v", err) | |||
| @@ -5,24 +5,91 @@ | |||
| package models | |||
| import ( | |||
| "path" | |||
| "path/filepath" | |||
| "testing" | |||
| "code.gitea.io/gitea/modules/setting" | |||
| "github.com/Unknwon/com" | |||
| "github.com/stretchr/testify/assert" | |||
| ) | |||
| func TestToWikiPageURL(t *testing.T) { | |||
| assert.Equal(t, "wiki-name", ToWikiPageURL("wiki-name")) | |||
| assert.Equal(t, "wiki-name-with-many-spaces", ToWikiPageURL("wiki name with many spaces")) | |||
| func TestNormalizeWikiName(t *testing.T) { | |||
| type test struct { | |||
| Expected string | |||
| WikiName string | |||
| } | |||
| for _, test := range []test{ | |||
| {"wiki name", "wiki name"}, | |||
| {"wiki name", "wiki-name"}, | |||
| {"name with/slash", "name with/slash"}, | |||
| {"name with%percent", "name-with%percent"}, | |||
| {"%2F", "%2F"}, | |||
| } { | |||
| assert.Equal(t, test.Expected, NormalizeWikiName(test.WikiName)) | |||
| } | |||
| } | |||
| func TestToWikiPageName(t *testing.T) { | |||
| assert.Equal(t, "wiki name", ToWikiPageName("wiki name")) | |||
| assert.Equal(t, "wiki name", ToWikiPageName("wiki-name")) | |||
| assert.Equal(t, "wiki name", ToWikiPageName("wiki\tname")) | |||
| assert.Equal(t, "wiki name", ToWikiPageName("./.././wiki/name")) | |||
| func TestWikiNameToFilename(t *testing.T) { | |||
| type test struct { | |||
| Expected string | |||
| WikiName string | |||
| } | |||
| for _, test := range []test{ | |||
| {"wiki-name.md", "wiki name"}, | |||
| {"wiki-name.md", "wiki-name"}, | |||
| {"name-with%2Fslash.md", "name with/slash"}, | |||
| {"name-with%25percent.md", "name with%percent"}, | |||
| } { | |||
| assert.Equal(t, test.Expected, WikiNameToFilename(test.WikiName)) | |||
| } | |||
| } | |||
| func TestWikiNameToSubURL(t *testing.T) { | |||
| type test struct { | |||
| Expected string | |||
| WikiName string | |||
| } | |||
| for _, test := range []test{ | |||
| {"wiki-name", "wiki name"}, | |||
| {"wiki-name", "wiki-name"}, | |||
| {"name-with%2Fslash", "name with/slash"}, | |||
| {"name-with%25percent", "name with%percent"}, | |||
| } { | |||
| assert.Equal(t, test.Expected, WikiNameToSubURL(test.WikiName)) | |||
| } | |||
| } | |||
| func TestWikiFilenameToName(t *testing.T) { | |||
| type test struct { | |||
| Expected string | |||
| Filename string | |||
| } | |||
| for _, test := range []test{ | |||
| {"hello world", "hello-world.md"}, | |||
| {"symbols/?*", "symbols%2F%3F%2A.md"}, | |||
| } { | |||
| name, err := WikiFilenameToName(test.Filename) | |||
| assert.NoError(t, err) | |||
| assert.Equal(t, test.Expected, name) | |||
| } | |||
| } | |||
| func TestWikiNameToFilenameToName(t *testing.T) { | |||
| // converting from wiki name to filename, then back to wiki name should | |||
| // return the original (normalized) name | |||
| for _, name := range []string{ | |||
| "wiki-name", | |||
| "wiki name", | |||
| "wiki name with/slash", | |||
| "$$$%%%^^&&!@#$(),.<>", | |||
| } { | |||
| filename := WikiNameToFilename(name) | |||
| resultName, err := WikiFilenameToName(filename) | |||
| assert.NoError(t, err) | |||
| assert.Equal(t, NormalizeWikiName(name), resultName) | |||
| } | |||
| } | |||
| func TestRepository_WikiCloneLink(t *testing.T) { | |||
| @@ -47,17 +114,72 @@ func TestRepository_WikiPath(t *testing.T) { | |||
| assert.Equal(t, expected, repo.WikiPath()) | |||
| } | |||
| // TODO TestRepository_HasWiki | |||
| func TestRepository_HasWiki(t *testing.T) { | |||
| prepareTestEnv(t) | |||
| repo1 := AssertExistsAndLoadBean(t, &Repository{ID: 1}).(*Repository) | |||
| assert.True(t, repo1.HasWiki()) | |||
| repo2 := AssertExistsAndLoadBean(t, &Repository{ID: 2}).(*Repository) | |||
| assert.False(t, repo2.HasWiki()) | |||
| } | |||
| func TestRepository_InitWiki(t *testing.T) { | |||
| prepareTestEnv(t) | |||
| // repo1 already has a wiki | |||
| repo1 := AssertExistsAndLoadBean(t, &Repository{ID: 1}).(*Repository) | |||
| assert.NoError(t, repo1.InitWiki()) | |||
| // TODO TestRepository_InitWiki | |||
| // repo2 does not already have a wiki | |||
| repo2 := AssertExistsAndLoadBean(t, &Repository{ID: 2}).(*Repository) | |||
| assert.NoError(t, repo2.InitWiki()) | |||
| assert.True(t, repo2.HasWiki()) | |||
| } | |||
| func TestRepository_LocalWikiPath(t *testing.T) { | |||
| assert.NoError(t, PrepareTestDatabase()) | |||
| prepareTestEnv(t) | |||
| repo := AssertExistsAndLoadBean(t, &Repository{ID: 1}).(*Repository) | |||
| expected := filepath.Join(setting.AppDataPath, "tmp/local-wiki/1") | |||
| assert.Equal(t, expected, repo.LocalWikiPath()) | |||
| } | |||
| // TODO TestRepository_UpdateLocalWiki | |||
| func TestRepository_AddWikiPage(t *testing.T) { | |||
| const wikiContent = "This is the wiki content" | |||
| const commitMsg = "Commit message" | |||
| repo := AssertExistsAndLoadBean(t, &Repository{ID: 1}).(*Repository) | |||
| doer := AssertExistsAndLoadBean(t, &User{ID: 2}).(*User) | |||
| for _, wikiName := range []string{ | |||
| "Another page", | |||
| "Here's a <tag> and a/slash", | |||
| } { | |||
| prepareTestEnv(t) | |||
| assert.NoError(t, repo.AddWikiPage(doer, wikiName, wikiContent, commitMsg)) | |||
| expectedPath := path.Join(repo.LocalWikiPath(), WikiNameToFilename(wikiName)) | |||
| assert.True(t, com.IsExist(expectedPath)) | |||
| } | |||
| } | |||
| func TestRepository_EditWikiPage(t *testing.T) { | |||
| const newWikiContent = "This is the new content" | |||
| const commitMsg = "Commit message" | |||
| repo := AssertExistsAndLoadBean(t, &Repository{ID: 1}).(*Repository) | |||
| doer := AssertExistsAndLoadBean(t, &User{ID: 2}).(*User) | |||
| for _, newWikiName := range []string{ | |||
| "New home", | |||
| "New/name/with/slashes", | |||
| } { | |||
| prepareTestEnv(t) | |||
| assert.NoError(t, repo.EditWikiPage(doer, "Home", newWikiName, newWikiContent, commitMsg)) | |||
| newPath := path.Join(repo.LocalWikiPath(), WikiNameToFilename(newWikiName)) | |||
| assert.True(t, com.IsExist(newPath)) | |||
| oldPath := path.Join(repo.LocalWikiPath(), "Home.md") | |||
| assert.False(t, com.IsExist(oldPath)) | |||
| } | |||
| } | |||
| // TODO ... (all remaining untested functions) | |||
| func TestRepository_DeleteWikiPage(t *testing.T) { | |||
| prepareTestEnv(t) | |||
| repo := AssertExistsAndLoadBean(t, &Repository{ID: 1}).(*Repository) | |||
| doer := AssertExistsAndLoadBean(t, &User{ID: 2}).(*User) | |||
| assert.NoError(t, repo.DeleteWikiPage(doer, "Home")) | |||
| wikiPath := path.Join(repo.LocalWikiPath(), "Home.md") | |||
| assert.False(t, com.IsExist(wikiPath)) | |||
| } | |||
| @@ -36,6 +36,7 @@ type Context struct { | |||
| Session session.Store | |||
| Link string // current request URL | |||
| EscapedLink string | |||
| User *models.User | |||
| IsSigned bool | |||
| IsBasicAuth bool | |||
| @@ -157,7 +158,7 @@ func Contexter() macaron.Handler { | |||
| csrf: x, | |||
| Flash: f, | |||
| Session: sess, | |||
| Link: setting.AppSubURL + strings.TrimSuffix(c.Req.URL.Path, "/"), | |||
| Link: setting.AppSubURL + strings.TrimSuffix(c.Req.URL.EscapedPath(), "/"), | |||
| Repo: &Repository{ | |||
| PullRequest: &PullRequest{}, | |||
| }, | |||
| @@ -11,6 +11,7 @@ import ( | |||
| "fmt" | |||
| "html/template" | |||
| "mime" | |||
| "net/url" | |||
| "path/filepath" | |||
| "runtime" | |||
| "strings" | |||
| @@ -105,6 +106,7 @@ func NewFuncMap() []template.FuncMap { | |||
| "ShortSha": base.ShortSha, | |||
| "MD5": base.EncodeMD5, | |||
| "ActionContent2Commits": ActionContent2Commits, | |||
| "PathEscape": url.PathEscape, | |||
| "EscapePound": func(str string) string { | |||
| return strings.NewReplacer("%", "%25", "#", "%23", " ", "%20", "?", "%3F").Replace(str) | |||
| }, | |||
| @@ -804,6 +804,7 @@ wiki.new_page_button = New Page | |||
| wiki.delete_page_button = Delete Page | |||
| wiki.delete_page_notice_1 = This will delete the page <code>"%s"</code>. Please make sure you want to delete this page. | |||
| wiki.page_already_exists = A wiki page with the same name already exists. | |||
| wiki.reserved_page = The wiki page name %s is reserved, please select a different name. | |||
| wiki.pages = Pages | |||
| wiki.last_updated = Last updated %s | |||
| @@ -7,7 +7,6 @@ package repo | |||
| import ( | |||
| "fmt" | |||
| "io/ioutil" | |||
| "net/url" | |||
| "path/filepath" | |||
| "strings" | |||
| "time" | |||
| @@ -47,140 +46,30 @@ func MustEnableWiki(ctx *context.Context) { | |||
| // PageMeta wiki page meta information | |||
| type PageMeta struct { | |||
| Name string | |||
| URL string | |||
| SubURL string | |||
| Updated time.Time | |||
| } | |||
| func urlEncoded(str string) string { | |||
| u, err := url.Parse(str) | |||
| if err != nil { | |||
| return str | |||
| } | |||
| return u.String() | |||
| } | |||
| func urlDecoded(str string) string { | |||
| res, err := url.QueryUnescape(str) | |||
| if err != nil { | |||
| return str | |||
| } | |||
| return res | |||
| } | |||
| // commitTreeBlobEntry processes found file and checks if it matches search target | |||
| func commitTreeBlobEntry(entry *git.TreeEntry, path string, targets []string, textOnly bool) *git.TreeEntry { | |||
| name := entry.Name() | |||
| ext := filepath.Ext(name) | |||
| if !textOnly || markdown.IsMarkdownFile(name) || ext == ".textile" { | |||
| for _, target := range targets { | |||
| if matchName(path, target) || matchName(urlEncoded(path), target) || matchName(urlDecoded(path), target) { | |||
| return entry | |||
| } | |||
| pathNoExt := strings.TrimSuffix(path, ext) | |||
| if matchName(pathNoExt, target) || matchName(urlEncoded(pathNoExt), target) || matchName(urlDecoded(pathNoExt), target) { | |||
| return entry | |||
| } | |||
| } | |||
| } | |||
| return nil | |||
| } | |||
| // commitTreeDirEntry is a recursive file tree traversal function | |||
| func commitTreeDirEntry(repo *git.Repository, commit *git.Commit, entries []*git.TreeEntry, prevPath string, targets []string, textOnly bool) (*git.TreeEntry, error) { | |||
| for i := range entries { | |||
| entry := entries[i] | |||
| var path string | |||
| if len(prevPath) == 0 { | |||
| path = entry.Name() | |||
| } else { | |||
| path = prevPath + "/" + entry.Name() | |||
| } | |||
| if entry.Type == git.ObjectBlob { | |||
| // File | |||
| if res := commitTreeBlobEntry(entry, path, targets, textOnly); res != nil { | |||
| return res, nil | |||
| } | |||
| } else if entry.IsDir() { | |||
| // Directory | |||
| // Get our tree entry, handling all possible errors | |||
| var err error | |||
| var tree *git.Tree | |||
| if tree, err = repo.GetTree(entry.ID.String()); tree == nil || err != nil { | |||
| if err == nil { | |||
| err = fmt.Errorf("repo.GetTree(%s) => nil", entry.ID.String()) | |||
| } | |||
| return nil, err | |||
| } | |||
| // Found us, get children entries | |||
| var ls git.Entries | |||
| if ls, err = tree.ListEntries(); err != nil { | |||
| return nil, err | |||
| } | |||
| // Call itself recursively to find needed entry | |||
| var te *git.TreeEntry | |||
| if te, err = commitTreeDirEntry(repo, commit, ls, path, targets, textOnly); err != nil { | |||
| return nil, err | |||
| } | |||
| if te != nil { | |||
| return te, nil | |||
| } | |||
| } | |||
| } | |||
| return nil, nil | |||
| } | |||
| // commitTreeEntry is the first step of commitTreeDirEntry, which should never be called directly | |||
| func commitTreeEntry(repo *git.Repository, commit *git.Commit, targets []string, textOnly bool) (*git.TreeEntry, error) { | |||
| // findEntryForFile finds the tree entry for a target filepath. | |||
| func findEntryForFile(commit *git.Commit, target string) (*git.TreeEntry, error) { | |||
| entries, err := commit.ListEntries() | |||
| if err != nil { | |||
| return nil, err | |||
| } | |||
| return commitTreeDirEntry(repo, commit, entries, "", targets, textOnly) | |||
| } | |||
| // findFile finds the best match for given filename in repo file tree | |||
| func findFile(repo *git.Repository, commit *git.Commit, target string, textOnly bool) (*git.TreeEntry, error) { | |||
| targets := []string{target, urlEncoded(target), urlDecoded(target)} | |||
| var entry *git.TreeEntry | |||
| var err error | |||
| if entry, err = commitTreeEntry(repo, commit, targets, textOnly); err != nil { | |||
| return nil, err | |||
| } | |||
| return entry, nil | |||
| } | |||
| // matchName matches generic name representation of the file with required one | |||
| func matchName(target, name string) bool { | |||
| if len(target) != len(name) { | |||
| return false | |||
| } | |||
| name = strings.ToLower(name) | |||
| target = strings.ToLower(target) | |||
| if name == target { | |||
| return true | |||
| } | |||
| target = strings.Replace(target, " ", "?", -1) | |||
| target = strings.Replace(target, "-", "?", -1) | |||
| for i := range name { | |||
| ch := name[i] | |||
| reqCh := target[i] | |||
| if ch != reqCh { | |||
| if string(reqCh) != "?" { | |||
| return false | |||
| } | |||
| for _, entry := range entries { | |||
| if entry.Type == git.ObjectBlob && entry.Name() == target { | |||
| return entry, nil | |||
| } | |||
| } | |||
| return true | |||
| return nil, nil | |||
| } | |||
| func findWikiRepoCommit(ctx *context.Context) (*git.Repository, *git.Commit, error) { | |||
| wikiRepo, err := git.OpenRepository(ctx.Repo.Repository.WikiPath()) | |||
| if err != nil { | |||
| // ctx.Handle(500, "OpenRepository", err) | |||
| ctx.Handle(500, "OpenRepository", err) | |||
| return nil, nil, err | |||
| } | |||
| if !wikiRepo.IsBranchExist("master") { | |||
| return wikiRepo, nil, nil | |||
| } | |||
| commit, err := wikiRepo.GetBranchCommit("master") | |||
| if err != nil { | |||
| @@ -190,14 +79,40 @@ func findWikiRepoCommit(ctx *context.Context) (*git.Repository, *git.Commit, err | |||
| return wikiRepo, commit, nil | |||
| } | |||
| // wikiContentsByEntry returns the contents of the wiki page referenced by the | |||
| // given tree entry. Writes to ctx if an error occurs. | |||
| func wikiContentsByEntry(ctx *context.Context, entry *git.TreeEntry) []byte { | |||
| reader, err := entry.Blob().Data() | |||
| if err != nil { | |||
| ctx.Handle(500, "Blob.Data", err) | |||
| return nil | |||
| } | |||
| content, err := ioutil.ReadAll(reader) | |||
| if err != nil { | |||
| ctx.Handle(500, "ReadAll", err) | |||
| return nil | |||
| } | |||
| return content | |||
| } | |||
| // wikiContentsByName returns the contents of a wiki page, along with a boolean | |||
| // indicating whether the page exists. Writes to ctx if an error occurs. | |||
| func wikiContentsByName(ctx *context.Context, commit *git.Commit, wikiName string) ([]byte, bool) { | |||
| entry, err := findEntryForFile(commit, models.WikiNameToFilename(wikiName)) | |||
| if err != nil { | |||
| ctx.Handle(500, "findEntryForFile", err) | |||
| return nil, false | |||
| } else if entry == nil { | |||
| return nil, false | |||
| } | |||
| return wikiContentsByEntry(ctx, entry), true | |||
| } | |||
| func renderWikiPage(ctx *context.Context, isViewPage bool) (*git.Repository, *git.TreeEntry) { | |||
| wikiRepo, commit, err := findWikiRepoCommit(ctx) | |||
| if err != nil { | |||
| return nil, nil | |||
| } | |||
| if commit == nil { | |||
| return wikiRepo, nil | |||
| } | |||
| // Get page list. | |||
| if isViewPage { | |||
| @@ -206,85 +121,62 @@ func renderWikiPage(ctx *context.Context, isViewPage bool) (*git.Repository, *gi | |||
| ctx.Handle(500, "ListEntries", err) | |||
| return nil, nil | |||
| } | |||
| pages := []PageMeta{} | |||
| for i := range entries { | |||
| if entries[i].Type == git.ObjectBlob { | |||
| name := entries[i].Name() | |||
| ext := filepath.Ext(name) | |||
| if markdown.IsMarkdownFile(name) || ext == ".textile" { | |||
| name = strings.TrimSuffix(name, ext) | |||
| if name == "" || name == "_Sidebar" || name == "_Footer" || name == "_Header" { | |||
| continue | |||
| } | |||
| pages = append(pages, PageMeta{ | |||
| Name: models.ToWikiPageName(name), | |||
| URL: name, | |||
| }) | |||
| } | |||
| pages := make([]PageMeta, 0, len(entries)) | |||
| for _, entry := range entries { | |||
| if entry.Type != git.ObjectBlob { | |||
| continue | |||
| } | |||
| wikiName, err := models.WikiFilenameToName(entry.Name()) | |||
| if err != nil { | |||
| ctx.Handle(500, "WikiFilenameToName", err) | |||
| return nil, nil | |||
| } else if wikiName == "_Sidebar" || wikiName == "_Footer" { | |||
| continue | |||
| } | |||
| pages = append(pages, PageMeta{ | |||
| Name: wikiName, | |||
| SubURL: models.WikiNameToSubURL(wikiName), | |||
| }) | |||
| } | |||
| ctx.Data["Pages"] = pages | |||
| } | |||
| pageURL := ctx.Params(":page") | |||
| if len(pageURL) == 0 { | |||
| pageURL = "Home" | |||
| pageName := models.NormalizeWikiName(ctx.Params(":page")) | |||
| if len(pageName) == 0 { | |||
| pageName = "Home" | |||
| } | |||
| ctx.Data["PageURL"] = pageURL | |||
| ctx.Data["PageURL"] = models.WikiNameToSubURL(pageName) | |||
| pageName := models.ToWikiPageName(pageURL) | |||
| ctx.Data["old_title"] = pageName | |||
| ctx.Data["Title"] = pageName | |||
| ctx.Data["title"] = pageName | |||
| ctx.Data["RequireHighlightJS"] = true | |||
| pageFilename := models.WikiNameToFilename(pageName) | |||
| var entry *git.TreeEntry | |||
| if entry, err = findFile(wikiRepo, commit, pageName, true); err != nil { | |||
| ctx.Handle(500, "findFile", err) | |||
| if entry, err = findEntryForFile(commit, pageFilename); err != nil { | |||
| ctx.Handle(500, "findEntryForFile", err) | |||
| return nil, nil | |||
| } | |||
| if entry == nil { | |||
| } else if entry == nil { | |||
| ctx.Redirect(ctx.Repo.RepoLink + "/wiki/_pages") | |||
| return nil, nil | |||
| } | |||
| blob := entry.Blob() | |||
| r, err := blob.Data() | |||
| if err != nil { | |||
| ctx.Handle(500, "Data", err) | |||
| return nil, nil | |||
| } | |||
| data, err := ioutil.ReadAll(r) | |||
| if err != nil { | |||
| ctx.Handle(500, "ReadAll", err) | |||
| data := wikiContentsByEntry(ctx, entry) | |||
| if ctx.Written() { | |||
| return nil, nil | |||
| } | |||
| sidebarPresent := false | |||
| sidebarContent := []byte{} | |||
| sentry, err := findFile(wikiRepo, commit, "_Sidebar", true) | |||
| if err == nil && sentry != nil { | |||
| r, err = sentry.Blob().Data() | |||
| if err == nil { | |||
| dataSB, err := ioutil.ReadAll(r) | |||
| if err == nil { | |||
| sidebarPresent = true | |||
| sidebarContent = dataSB | |||
| } | |||
| if isViewPage { | |||
| sidebarContent, sidebarPresent := wikiContentsByName(ctx, commit, "_Sidebar") | |||
| if ctx.Written() { | |||
| return nil, nil | |||
| } | |||
| } | |||
| footerPresent := false | |||
| footerContent := []byte{} | |||
| sentry, err = findFile(wikiRepo, commit, "_Footer", true) | |||
| if err == nil && sentry != nil { | |||
| r, err = sentry.Blob().Data() | |||
| if err == nil { | |||
| dataSB, err := ioutil.ReadAll(r) | |||
| if err == nil { | |||
| footerPresent = true | |||
| footerContent = dataSB | |||
| } | |||
| footerContent, footerPresent := wikiContentsByName(ctx, commit, "_Footer") | |||
| if ctx.Written() { | |||
| return nil, nil | |||
| } | |||
| } | |||
| if isViewPage { | |||
| metas := ctx.Repo.Repository.ComposeMetas() | |||
| ctx.Data["content"] = markdown.RenderWiki(data, ctx.Repo.RepoLink, metas) | |||
| ctx.Data["sidebarPresent"] = sidebarPresent | |||
| @@ -322,13 +214,13 @@ func Wiki(ctx *context.Context) { | |||
| return | |||
| } | |||
| ename := entry.Name() | |||
| if markup.Type(ename) != markdown.MarkupName { | |||
| ext := strings.ToUpper(filepath.Ext(ename)) | |||
| wikiPath := entry.Name() | |||
| if markup.Type(wikiPath) != markdown.MarkupName { | |||
| ext := strings.ToUpper(filepath.Ext(wikiPath)) | |||
| ctx.Data["FormatWarning"] = fmt.Sprintf("%s rendering is not supported at the moment. Rendered as Markdown.", ext) | |||
| } | |||
| // Get last change information. | |||
| lastCommit, err := wikiRepo.GetCommitByPath(ename) | |||
| lastCommit, err := wikiRepo.GetCommitByPath(wikiPath) | |||
| if err != nil { | |||
| ctx.Handle(500, "GetCommitByPath", err) | |||
| return | |||
| @@ -359,27 +251,25 @@ func WikiPages(ctx *context.Context) { | |||
| return | |||
| } | |||
| pages := make([]PageMeta, 0, len(entries)) | |||
| for i := range entries { | |||
| if entries[i].Type == git.ObjectBlob { | |||
| c, err := wikiRepo.GetCommitByPath(entries[i].Name()) | |||
| if err != nil { | |||
| ctx.Handle(500, "GetCommit", err) | |||
| return | |||
| } | |||
| name := entries[i].Name() | |||
| ext := filepath.Ext(name) | |||
| if markdown.IsMarkdownFile(name) || ext == ".textile" { | |||
| name = strings.TrimSuffix(name, ext) | |||
| if name == "" { | |||
| continue | |||
| } | |||
| pages = append(pages, PageMeta{ | |||
| Name: models.ToWikiPageName(name), | |||
| URL: name, | |||
| Updated: c.Author.When, | |||
| }) | |||
| } | |||
| for _, entry := range entries { | |||
| if entry.Type != git.ObjectBlob { | |||
| continue | |||
| } | |||
| c, err := wikiRepo.GetCommitByPath(entry.Name()) | |||
| if err != nil { | |||
| ctx.Handle(500, "GetCommit", err) | |||
| return | |||
| } | |||
| wikiName, err := models.WikiFilenameToName(entry.Name()) | |||
| if err != nil { | |||
| ctx.Handle(500, "WikiFilenameToName", err) | |||
| return | |||
| } | |||
| pages = append(pages, PageMeta{ | |||
| Name: wikiName, | |||
| SubURL: models.WikiNameToSubURL(wikiName), | |||
| Updated: c.Author.When, | |||
| }) | |||
| } | |||
| ctx.Data["Pages"] = pages | |||
| @@ -394,31 +284,23 @@ func WikiRaw(ctx *context.Context) { | |||
| return | |||
| } | |||
| } | |||
| uri := ctx.Params("*") | |||
| providedPath := ctx.Params("*") | |||
| if strings.HasSuffix(providedPath, ".md") { | |||
| providedPath = providedPath[:len(providedPath)-3] | |||
| } | |||
| wikiPath := models.WikiNameToFilename(providedPath) | |||
| var entry *git.TreeEntry | |||
| if commit != nil { | |||
| entry, err = findFile(wikiRepo, commit, uri, false) | |||
| } | |||
| if err != nil || entry == nil { | |||
| if entry == nil || commit == nil { | |||
| defBranch := ctx.Repo.Repository.DefaultBranch | |||
| if commit, err = ctx.Repo.GitRepo.GetBranchCommit(defBranch); commit == nil || err != nil { | |||
| ctx.Handle(500, "GetBranchCommit", err) | |||
| return | |||
| } | |||
| if entry, err = findFile(ctx.Repo.GitRepo, commit, uri, false); err != nil { | |||
| ctx.Handle(500, "findFile", err) | |||
| return | |||
| } | |||
| if entry == nil { | |||
| ctx.Handle(404, "findFile", nil) | |||
| return | |||
| } | |||
| } else { | |||
| ctx.Handle(500, "findFile", err) | |||
| return | |||
| } | |||
| entry, err = findEntryForFile(commit, wikiPath) | |||
| } | |||
| if err != nil { | |||
| ctx.Handle(500, "findFile", err) | |||
| return | |||
| } else if entry == nil { | |||
| ctx.Handle(404, "findEntryForFile", nil) | |||
| return | |||
| } | |||
| if err = ServeBlob(ctx, entry.Blob()); err != nil { | |||
| ctx.Handle(500, "ServeBlob", err) | |||
| } | |||
| @@ -437,7 +319,7 @@ func NewWiki(ctx *context.Context) { | |||
| ctx.HTML(200, tplWikiNew) | |||
| } | |||
| // NewWikiPost response fro wiki create request | |||
| // NewWikiPost response for wiki create request | |||
| func NewWikiPost(ctx *context.Context, form auth.NewWikiForm) { | |||
| ctx.Data["Title"] = ctx.Tr("repo.wiki.new_page") | |||
| ctx.Data["PageIsWiki"] = true | |||
| @@ -448,10 +330,12 @@ func NewWikiPost(ctx *context.Context, form auth.NewWikiForm) { | |||
| return | |||
| } | |||
| wikiPath := models.ToWikiPageURL(form.Title) | |||
| if err := ctx.Repo.Repository.AddWikiPage(ctx.User, wikiPath, form.Content, form.Message); err != nil { | |||
| if models.IsErrWikiAlreadyExist(err) { | |||
| wikiName := models.NormalizeWikiName(form.Title) | |||
| if err := ctx.Repo.Repository.AddWikiPage(ctx.User, wikiName, form.Content, form.Message); err != nil { | |||
| if models.IsErrWikiReservedName(err) { | |||
| ctx.Data["Err_Title"] = true | |||
| ctx.RenderWithErr(ctx.Tr("repo.wiki.reserved_page", wikiName), tplWikiNew, &form) | |||
| } else if models.IsErrWikiAlreadyExist(err) { | |||
| ctx.Data["Err_Title"] = true | |||
| ctx.RenderWithErr(ctx.Tr("repo.wiki.page_already_exists"), tplWikiNew, &form) | |||
| } else { | |||
| @@ -460,7 +344,7 @@ func NewWikiPost(ctx *context.Context, form auth.NewWikiForm) { | |||
| return | |||
| } | |||
| ctx.Redirect(ctx.Repo.RepoLink + "/wiki/" + wikiPath) | |||
| ctx.Redirect(ctx.Repo.RepoLink + "/wiki/" + models.WikiNameToFilename(wikiName)) | |||
| } | |||
| // EditWiki render wiki modify page | |||
| @@ -482,7 +366,7 @@ func EditWiki(ctx *context.Context) { | |||
| ctx.HTML(200, tplWikiNew) | |||
| } | |||
| // EditWikiPost response fro wiki modify request | |||
| // EditWikiPost response for wiki modify request | |||
| func EditWikiPost(ctx *context.Context, form auth.NewWikiForm) { | |||
| ctx.Data["Title"] = ctx.Tr("repo.wiki.new_page") | |||
| ctx.Data["PageIsWiki"] = true | |||
| @@ -493,25 +377,25 @@ func EditWikiPost(ctx *context.Context, form auth.NewWikiForm) { | |||
| return | |||
| } | |||
| oldWikiPath := models.ToWikiPageURL(ctx.Params(":page")) | |||
| newWikiPath := models.ToWikiPageURL(form.Title) | |||
| oldWikiName := models.NormalizeWikiName(ctx.Params(":page")) | |||
| newWikiName := models.NormalizeWikiName(form.Title) | |||
| if err := ctx.Repo.Repository.EditWikiPage(ctx.User, oldWikiPath, newWikiPath, form.Content, form.Message); err != nil { | |||
| if err := ctx.Repo.Repository.EditWikiPage(ctx.User, oldWikiName, newWikiName, form.Content, form.Message); err != nil { | |||
| ctx.Handle(500, "EditWikiPage", err) | |||
| return | |||
| } | |||
| ctx.Redirect(ctx.Repo.RepoLink + "/wiki/" + newWikiPath) | |||
| ctx.Redirect(ctx.Repo.RepoLink + "/wiki/" + models.WikiNameToFilename(newWikiName)) | |||
| } | |||
| // DeleteWikiPagePost delete wiki page | |||
| func DeleteWikiPagePost(ctx *context.Context) { | |||
| pageURL := models.ToWikiPageURL(ctx.Params(":page")) | |||
| if len(pageURL) == 0 { | |||
| pageURL = "Home" | |||
| wikiName := models.NormalizeWikiName(ctx.Params(":page")) | |||
| if len(wikiName) == 0 { | |||
| wikiName = "Home" | |||
| } | |||
| if err := ctx.Repo.Repository.DeleteWikiPage(ctx.User, pageURL); err != nil { | |||
| if err := ctx.Repo.Repository.DeleteWikiPage(ctx.User, wikiName); err != nil { | |||
| ctx.Handle(500, "DeleteWikiPage", err) | |||
| return | |||
| } | |||
| @@ -608,7 +608,6 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Group("/wiki", func() { | |||
| m.Get("/raw/*", repo.WikiRaw) | |||
| m.Get("/*", repo.WikiRaw) | |||
| }, repo.MustEnableWiki) | |||
| m.Group("/activity", func() { | |||
| @@ -16,7 +16,7 @@ | |||
| <tr> | |||
| <td> | |||
| <i class="octicon octicon-file-text"></i> | |||
| <a href="{{$.RepoLink}}/wiki/{{.URL}}">{{.Name}}</a> | |||
| <a href="{{$.RepoLink}}/wiki/{{.SubURL}}">{{.Name}}</a> | |||
| </td> | |||
| {{$timeSince := TimeSince .Updated $.Lang}} | |||
| <td class="text right grey">{{$.i18n.Tr "repo.wiki.last_updated" $timeSince | Safe}}</td> | |||
| @@ -21,7 +21,7 @@ | |||
| </div> | |||
| <div class="scrolling menu"> | |||
| {{range .Pages}} | |||
| <div class="item {{if eq $.Title .Name}}selected{{end}}" data-url="{{$.RepoLink}}/wiki/{{.URL}}">{{.Name}}</div> | |||
| <div class="item {{if eq $.Title .Name}}selected{{end}}" data-url="{{$.RepoLink}}/wiki/{{.SubURL}}">{{.Name}}</div> | |||
| {{end}} | |||
| </div> | |||
| </div> | |||
| @@ -57,9 +57,9 @@ | |||
| {{$title}} | |||
| {{if and .IsRepositoryWriter (not .Repository.IsMirror)}} | |||
| <div class="ui right"> | |||
| <a class="ui small button" href="{{.RepoLink}}/wiki/{{EscapePound .PageURL}}/_edit">{{.i18n.Tr "repo.wiki.edit_page_button"}}</a> | |||
| <a class="ui small button" href="{{.RepoLink}}/wiki/{{.PageURL}}/_edit">{{.i18n.Tr "repo.wiki.edit_page_button"}}</a> | |||
| <a class="ui green small button" href="{{.RepoLink}}/wiki/_new">{{.i18n.Tr "repo.wiki.new_page_button"}}</a> | |||
| <a class="ui red small button delete-button" href="" data-url="{{.RepoLink}}/wiki/{{EscapePound .PageURL}}/delete" data-id="{{EscapePound .PageURL}}">{{.i18n.Tr "repo.wiki.delete_page_button"}}</a> | |||
| <a class="ui red small button delete-button" href="" data-url="{{.RepoLink}}/wiki/{{.PageURL}}/delete" data-id="{{.PageURL}}">{{.i18n.Tr "repo.wiki.delete_page_button"}}</a> | |||
| </div> | |||
| {{end}} | |||
| <div class="ui sub header"> | |||
| @@ -85,7 +85,7 @@ | |||
| {{end}} | |||
| </div> | |||
| {{if .footerPresent}} | |||
| <div class="ui grey segment"> | |||
| <div class="ui segment"> | |||
| {{.footerContent | Str2html}} | |||
| </div> | |||
| {{end}} | |||
| @@ -41,6 +41,11 @@ func (f StrTo) Int64() (int64, error) { | |||
| return int64(v), err | |||
| } | |||
| func (f StrTo) Float64() (float64, error) { | |||
| v, err := strconv.ParseFloat(f.String(), 64) | |||
| return float64(v), err | |||
| } | |||
| func (f StrTo) MustUint8() uint8 { | |||
| v, _ := f.Uint8() | |||
| return v | |||
| @@ -56,6 +61,11 @@ func (f StrTo) MustInt64() int64 { | |||
| return v | |||
| } | |||
| func (f StrTo) MustFloat64() float64 { | |||
| v, _ := f.Float64() | |||
| return v | |||
| } | |||
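A quick usage sketch of the new float helpers; it assumes StrTo is the string-conversion type from github.com/Unknwon/com, which is what the vendor update at the end of this change points at.

package main

import (
	"fmt"

	"github.com/Unknwon/com"
)

func main() {
	v, err := com.StrTo("3.14").Float64() // parsed with strconv.ParseFloat
	fmt.Println(v, err)                   // 3.14 <nil>

	// MustFloat64 swallows the parse error and returns the zero value on failure.
	fmt.Println(com.StrTo("not a number").MustFloat64()) // 0
}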
| func (f StrTo) String() string { | |||
| if f.Exist() { | |||
| return string(f) | |||
| @@ -19,9 +19,7 @@ import ( | |||
| "crypto/aes" | |||
| "crypto/cipher" | |||
| "crypto/rand" | |||
| "encoding/base64" | |||
| "errors" | |||
| "io" | |||
| r "math/rand" | |||
| "strconv" | |||
| "strings" | |||
| @@ -30,41 +28,53 @@ import ( | |||
| "unicode/utf8" | |||
| ) | |||
| // AESEncrypt encrypts text and given key with AES. | |||
| func AESEncrypt(key, text []byte) ([]byte, error) { | |||
| // AESGCMEncrypt encrypts plaintext with the given key using AES in GCM mode. | |||
| func AESGCMEncrypt(key, plaintext []byte) ([]byte, error) { | |||
| block, err := aes.NewCipher(key) | |||
| if err != nil { | |||
| return nil, err | |||
| } | |||
| b := base64.StdEncoding.EncodeToString(text) | |||
| ciphertext := make([]byte, aes.BlockSize+len(b)) | |||
| iv := ciphertext[:aes.BlockSize] | |||
| if _, err := io.ReadFull(rand.Reader, iv); err != nil { | |||
| gcm, err := cipher.NewGCM(block) | |||
| if err != nil { | |||
| return nil, err | |||
| } | |||
| cfb := cipher.NewCFBEncrypter(block, iv) | |||
| cfb.XORKeyStream(ciphertext[aes.BlockSize:], []byte(b)) | |||
| return ciphertext, nil | |||
| nonce := make([]byte, gcm.NonceSize()) | |||
| if _, err := rand.Read(nonce); err != nil { | |||
| return nil, err | |||
| } | |||
| ciphertext := gcm.Seal(nil, nonce, plaintext, nil) | |||
| return append(nonce, ciphertext...), nil | |||
| } | |||
| // AESDecrypt decrypts text and given key with AES. | |||
| func AESDecrypt(key, text []byte) ([]byte, error) { | |||
| // AESGCMDecrypt decrypts ciphertext with the given key using AES in GCM mode. | |||
| func AESGCMDecrypt(key, ciphertext []byte) ([]byte, error) { | |||
| block, err := aes.NewCipher(key) | |||
| if err != nil { | |||
| return nil, err | |||
| } | |||
| if len(text) < aes.BlockSize { | |||
| return nil, errors.New("ciphertext too short") | |||
| gcm, err := cipher.NewGCM(block) | |||
| if err != nil { | |||
| return nil, err | |||
| } | |||
| size := gcm.NonceSize() | |||
| if len(ciphertext)-size <= 0 { | |||
| return nil, errors.New("Ciphertext is empty") | |||
| } | |||
| iv := text[:aes.BlockSize] | |||
| text = text[aes.BlockSize:] | |||
| cfb := cipher.NewCFBDecrypter(block, iv) | |||
| cfb.XORKeyStream(text, text) | |||
| data, err := base64.StdEncoding.DecodeString(string(text)) | |||
| nonce := ciphertext[:size] | |||
| ciphertext = ciphertext[size:] | |||
| plainText, err := gcm.Open(nil, nonce, ciphertext, nil) | |||
| if err != nil { | |||
| return nil, err | |||
| } | |||
| return data, nil | |||
| return plainText, nil | |||
| } | |||
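For context, here is a minimal round trip through the new GCM helpers. It assumes they ship in github.com/Unknwon/com, which is how the macaron changes further down call them; the key length (16, 24 or 32 bytes) selects AES-128/192/256.

package main

import (
	"fmt"
	"log"

	"github.com/Unknwon/com"
)

func main() {
	key := []byte("0123456789abcdef") // 16 bytes -> AES-128
	ct, err := com.AESGCMEncrypt(key, []byte("hello"))
	if err != nil {
		log.Fatal(err)
	}
	// The random nonce is prepended to the ciphertext, so decryption
	// needs only the key and the combined blob.
	pt, err := com.AESGCMDecrypt(key, ct)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(pt)) // "hello"
}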
| // IsLetter returns true if the 'l' is an English letter. | |||
| @@ -176,7 +176,7 @@ recommend that a file or class name and description of purpose be included on | |||
| the same "printed page" as the copyright notice for easier identification within | |||
| third-party archives. | |||
| Copyright [yyyy] [name of copyright owner] | |||
| Copyright 2014 The Macaron Authors | |||
| Licensed under the Apache License, Version 2.0 (the "License"); | |||
| you may not use this file except in compliance with the License. | |||
| @@ -1,4 +1,4 @@ | |||
| Macaron [](https://travis-ci.org/go-macaron/macaron) [](http://gocover.io/github.com/go-macaron/macaron) | |||
| Macaron [](https://travis-ci.org/go-macaron/macaron) | |||
| ======================= | |||
|  | |||
| @@ -61,18 +61,20 @@ There are already many [middlewares](https://github.com/go-macaron) to simplify | |||
| - [bindata](https://github.com/go-macaron/bindata) - Embed binary data as static and template files | |||
| - [toolbox](https://github.com/go-macaron/toolbox) - Health check, pprof, profile and statistic services | |||
| - [oauth2](https://github.com/go-macaron/oauth2) - OAuth 2.0 backend | |||
| - [authz](https://github.com/go-macaron/authz) - ACL/RBAC/ABAC authorization based on Casbin | |||
| - [switcher](https://github.com/go-macaron/switcher) - Multiple-site support | |||
| - [method](https://github.com/go-macaron/method) - HTTP method override | |||
| - [permissions2](https://github.com/xyproto/permissions2) - Cookies, users and permissions | |||
| - [renders](https://github.com/go-macaron/renders) - Beego-like render engine (Macaron has a built-in template engine; this is another option) | |||
| - [piwik](https://github.com/veecue/piwik-middleware) - Server-side piwik analytics | |||
| ## Use Cases | |||
| - [Gogs](https://gogs.io): A painless self-hosted Git Service | |||
| - [Grafana](http://grafana.org/): The open platform for beautiful analytics and monitoring | |||
| - [Peach](https://peachdocs.org): A modern web documentation server | |||
| - [Go Walker](https://gowalker.org): Go online API documentation | |||
| - [Switch](https://gopm.io): Gopm registry | |||
| - [YouGam](http://yougam.com): Online Forum | |||
| - [Critical Stack Intel](https://intel.criticalstack.com/): A 100% free intel marketplace from Critical Stack, Inc. | |||
| ## Getting Help | |||
| @@ -80,7 +82,6 @@ There are already many [middlewares](https://github.com/go-macaron) to simplify | |||
| - [API Reference](https://gowalker.org/gopkg.in/macaron.v1) | |||
| - [Documentation](https://go-macaron.com) | |||
| - [FAQs](https://go-macaron.com/docs/faqs) | |||
| - [](https://gitter.im/go-macaron/macaron?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) | |||
| ## Credits | |||
| @@ -15,7 +15,7 @@ | |||
| package macaron | |||
| import ( | |||
| "crypto/md5" | |||
| "crypto/sha256" | |||
| "encoding/hex" | |||
| "html/template" | |||
| "io" | |||
| @@ -32,8 +32,8 @@ import ( | |||
| "time" | |||
| "github.com/Unknwon/com" | |||
| "github.com/go-macaron/inject" | |||
| "golang.org/x/crypto/pbkdf2" | |||
| ) | |||
| // Locale represents a localization interface. | |||
| @@ -72,6 +72,14 @@ func (r *Request) Body() *RequestBody { | |||
| return &RequestBody{r.Request.Body} | |||
| } | |||
| // ContextInvoker is an inject.FastInvoker wrapper of func(ctx *Context). | |||
| type ContextInvoker func(ctx *Context) | |||
| func (invoke ContextInvoker) Invoke(params []interface{}) ([]reflect.Value, error) { | |||
| invoke(params[0].(*Context)) | |||
| return nil, nil | |||
| } | |||
| // Context represents the runtime context of current request of Macaron instance. | |||
| // It is the integration of most frequently used middlewares and helper methods. | |||
| type Context struct { | |||
| @@ -260,6 +268,11 @@ func (ctx *Context) SetParams(name, val string) { | |||
| ctx.params[name] = val | |||
| } | |||
| // ReplaceAllParams replaces all current params with the given params | |||
| func (ctx *Context) ReplaceAllParams(params Params) { | |||
| ctx.params = params | |||
| } | |||
| // ParamsEscape returns the escaped params result. | |||
| // e.g. ctx.ParamsEscape(":uname") | |||
| func (ctx *Context) ParamsEscape(name string) string { | |||
| @@ -411,30 +424,29 @@ func (ctx *Context) GetSecureCookie(key string) (string, bool) { | |||
| // SetSuperSecureCookie sets given cookie value to response header with secret string. | |||
| func (ctx *Context) SetSuperSecureCookie(secret, name, value string, others ...interface{}) { | |||
| m := md5.Sum([]byte(secret)) | |||
| secret = hex.EncodeToString(m[:]) | |||
| text, err := com.AESEncrypt([]byte(secret), []byte(value)) | |||
| key := pbkdf2.Key([]byte(secret), []byte(secret), 1000, 16, sha256.New) | |||
| text, err := com.AESGCMEncrypt(key, []byte(value)) | |||
| if err != nil { | |||
| panic("error encrypting cookie: " + err.Error()) | |||
| } | |||
| ctx.SetCookie(name, hex.EncodeToString(text), others...) | |||
| } | |||
| // GetSuperSecureCookie returns given cookie value from request header with secret string. | |||
| func (ctx *Context) GetSuperSecureCookie(secret, key string) (string, bool) { | |||
| val := ctx.GetCookie(key) | |||
| func (ctx *Context) GetSuperSecureCookie(secret, name string) (string, bool) { | |||
| val := ctx.GetCookie(name) | |||
| if val == "" { | |||
| return "", false | |||
| } | |||
| data, err := hex.DecodeString(val) | |||
| text, err := hex.DecodeString(val) | |||
| if err != nil { | |||
| return "", false | |||
| } | |||
| m := md5.Sum([]byte(secret)) | |||
| secret = hex.EncodeToString(m[:]) | |||
| text, err := com.AESDecrypt([]byte(secret), data) | |||
| key := pbkdf2.Key([]byte(secret), []byte(secret), 1000, 16, sha256.New) | |||
| text, err = com.AESGCMDecrypt(key, text) | |||
| return string(text), err == nil | |||
| } | |||
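A small sketch of the reworked secure-cookie round trip as seen from application code. This is illustrative standard macaron usage, not code from this change: the key is derived from the secret with PBKDF2 (SHA-256, 1000 iterations, 16 bytes) and the value is stored AES-GCM encrypted and hex encoded.

package main

import "gopkg.in/macaron.v1"

func main() {
	m := macaron.Classic()
	m.Get("/set", func(ctx *macaron.Context) string {
		ctx.SetSuperSecureCookie("my-secret", "session", "42")
		return "cookie set"
	})
	m.Get("/get", func(ctx *macaron.Context) string {
		val, ok := ctx.GetSuperSecureCookie("my-secret", "session")
		if !ok {
			return "missing or tampered cookie"
		}
		return val // "42"
	})
	m.Run()
}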
| @@ -19,6 +19,7 @@ import ( | |||
| "fmt" | |||
| "log" | |||
| "net/http" | |||
| "reflect" | |||
| "runtime" | |||
| "time" | |||
| ) | |||
| @@ -32,6 +33,14 @@ func init() { | |||
| ColorLog = runtime.GOOS != "windows" | |||
| } | |||
| // LoggerInvoker is an inject.FastInvoker wrapper of func(ctx *Context, log *log.Logger). | |||
| type LoggerInvoker func(ctx *Context, log *log.Logger) | |||
| func (invoke LoggerInvoker) Invoke(params []interface{}) ([]reflect.Value, error) { | |||
| invoke(params[0].(*Context), params[1].(*log.Logger)) | |||
| return nil, nil | |||
| } | |||
| // Logger returns a middleware handler that logs the request as it goes in and the response as it goes out. | |||
| func Logger() Handler { | |||
| return func(ctx *Context, log *log.Logger) { | |||
| @@ -42,7 +51,7 @@ func Logger() Handler { | |||
| rw := ctx.Resp.(ResponseWriter) | |||
| ctx.Next() | |||
| content := fmt.Sprintf("%s: Completed %s %v %s in %v", time.Now().Format(LogTimeFormat), ctx.Req.RequestURI, rw.Status(), http.StatusText(rw.Status()), time.Since(start)) | |||
| content := fmt.Sprintf("%s: Completed %s %s %v %s in %v", time.Now().Format(LogTimeFormat), ctx.Req.Method, ctx.Req.RequestURI, rw.Status(), http.StatusText(rw.Status()), time.Since(start)) | |||
| if ColorLog { | |||
| switch rw.Status() { | |||
| case 200, 201, 202: | |||
| @@ -32,7 +32,7 @@ import ( | |||
| "github.com/go-macaron/inject" | |||
| ) | |||
| const _VERSION = "1.1.12.0122" | |||
| const _VERSION = "1.2.4.1123" | |||
| func Version() string { | |||
| return _VERSION | |||
| @@ -43,20 +43,63 @@ func Version() string { | |||
| // and panics if an argument could not be fulfilled via dependency injection. | |||
| type Handler interface{} | |||
| // validateHandler makes sure a handler is a callable function, | |||
| // and panics if it is not. | |||
| func validateHandler(h Handler) { | |||
| // handlerFuncInvoker is an inject.FastInvoker wrapper of func(http.ResponseWriter, *http.Request). | |||
| type handlerFuncInvoker func(http.ResponseWriter, *http.Request) | |||
| func (invoke handlerFuncInvoker) Invoke(params []interface{}) ([]reflect.Value, error) { | |||
| invoke(params[0].(http.ResponseWriter), params[1].(*http.Request)) | |||
| return nil, nil | |||
| } | |||
| // internalServerErrorInvoker is an inject.FastInvoker wrapper of func(rw http.ResponseWriter, err error). | |||
| type internalServerErrorInvoker func(rw http.ResponseWriter, err error) | |||
| func (invoke internalServerErrorInvoker) Invoke(params []interface{}) ([]reflect.Value, error) { | |||
| invoke(params[0].(http.ResponseWriter), params[1].(error)) | |||
| return nil, nil | |||
| } | |||
| // validateAndWrapHandler makes sure a handler is a callable function; it panics if not. | |||
| // When the handler also matches one of the built-in inject.FastInvoker signatures, | |||
| // it wraps the handler automatically for a performance gain. | |||
| func validateAndWrapHandler(h Handler) Handler { | |||
| if reflect.TypeOf(h).Kind() != reflect.Func { | |||
| panic("Macaron handler must be a callable function") | |||
| } | |||
| if !inject.IsFastInvoker(h) { | |||
| switch v := h.(type) { | |||
| case func(*Context): | |||
| return ContextInvoker(v) | |||
| case func(*Context, *log.Logger): | |||
| return LoggerInvoker(v) | |||
| case func(http.ResponseWriter, *http.Request): | |||
| return handlerFuncInvoker(v) | |||
| case func(http.ResponseWriter, error): | |||
| return internalServerErrorInvoker(v) | |||
| } | |||
| } | |||
| return h | |||
| } | |||
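In practice the wrapping is transparent to callers: handlers whose signature matches one of the cases above are invoked directly instead of through reflect.Value.Call. A hedged sketch of ordinary usage that benefits from it:

package main

import (
	"net/http"

	"gopkg.in/macaron.v1"
)

func main() {
	m := macaron.New()

	// func(*macaron.Context) is wrapped as ContextInvoker, so this
	// middleware skips reflection on every request.
	m.Use(func(ctx *macaron.Context) {
		ctx.Next()
	})

	// func(http.ResponseWriter, *http.Request) becomes handlerFuncInvoker.
	m.Get("/", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("ok"))
	})

	m.Run()
}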
| // validateHandlers makes sure handlers are callable functions, | |||
| // and panics if any of them is not. | |||
| func validateHandlers(handlers []Handler) { | |||
| for _, h := range handlers { | |||
| validateHandler(h) | |||
| // validateAndWrapHandlers performs validation and wrapping for each input handler. | |||
| // It accepts an optional wrapper function to perform custom wrapping on handlers. | |||
| func validateAndWrapHandlers(handlers []Handler, wrappers ...func(Handler) Handler) []Handler { | |||
| var wrapper func(Handler) Handler | |||
| if len(wrappers) > 0 { | |||
| wrapper = wrappers[0] | |||
| } | |||
| wrappedHandlers := make([]Handler, len(handlers)) | |||
| for i, h := range handlers { | |||
| h = validateAndWrapHandler(h) | |||
| if wrapper != nil && !inject.IsFastInvoker(h) { | |||
| h = wrapper(h) | |||
| } | |||
| wrappedHandlers[i] = h | |||
| } | |||
| return wrappedHandlers | |||
| } | |||
| // Macaron represents the top level web application. | |||
| @@ -101,7 +144,7 @@ func New() *Macaron { | |||
| } | |||
| // Classic creates a classic Macaron with some basic default middleware: | |||
| // mocaron.Logger, mocaron.Recovery and mocaron.Static. | |||
| // macaron.Logger, macaron.Recovery and macaron.Static. | |||
| func Classic() *Macaron { | |||
| m := New() | |||
| m.Use(Logger()) | |||
| @@ -123,7 +166,7 @@ func (m *Macaron) Handlers(handlers ...Handler) { | |||
| // Action sets the handler that will be called after all the middleware has been invoked. | |||
| // This is set to macaron.Router in a macaron.Classic(). | |||
| func (m *Macaron) Action(handler Handler) { | |||
| validateHandler(handler) | |||
| handler = validateAndWrapHandler(handler) | |||
| m.action = handler | |||
| } | |||
| @@ -139,7 +182,7 @@ func (m *Macaron) Before(handler BeforeHandler) { | |||
| // and panics if the handler is not a callable func. | |||
| // Middleware Handlers are invoked in the order that they are added. | |||
| func (m *Macaron) Use(handler Handler) { | |||
| validateHandler(handler) | |||
| handler = validateAndWrapHandler(handler) | |||
| m.handlers = append(m.handlers, handler) | |||
| } | |||
| @@ -82,6 +82,9 @@ type Router struct { | |||
| groups []group | |||
| notFound http.HandlerFunc | |||
| internalServerError func(*Context, error) | |||
| // handlerWrapper is used to wrap arbitrary function from Handler to inject.FastInvoker. | |||
| handlerWrapper func(Handler) Handler | |||
| } | |||
| func NewRouter() *Router { | |||
| @@ -115,7 +118,7 @@ func (r *Route) Name(name string) { | |||
| if len(name) == 0 { | |||
| panic("route name cannot be empty") | |||
| } else if r.router.namedRoutes[name] != nil { | |||
| panic("route with given name already exists") | |||
| panic("route with given name already exists: " + name) | |||
| } | |||
| r.router.namedRoutes[name] = r.leaf | |||
| } | |||
| @@ -173,7 +176,7 @@ func (r *Router) Handle(method string, pattern string, handlers []Handler) *Rout | |||
| h = append(h, handlers...) | |||
| handlers = h | |||
| } | |||
| validateHandlers(handlers) | |||
| handlers = validateAndWrapHandlers(handlers, r.handlerWrapper) | |||
| return r.handle(method, pattern, func(resp http.ResponseWriter, req *http.Request, params Params) { | |||
| c := r.m.createContext(resp, req) | |||
| @@ -251,11 +254,11 @@ func (r *Router) Combo(pattern string, h ...Handler) *ComboRouter { | |||
| return &ComboRouter{r, pattern, h, map[string]bool{}, nil} | |||
| } | |||
| // Configurable http.HandlerFunc which is called when no matching route is | |||
| // NotFound configures the http.HandlerFunc which is called when no matching route is | |||
| // found. If it is not set, http.NotFound is used. | |||
| // Be sure to set 404 response code in your handler. | |||
| func (r *Router) NotFound(handlers ...Handler) { | |||
| validateHandlers(handlers) | |||
| handlers = validateAndWrapHandlers(handlers) | |||
| r.notFound = func(rw http.ResponseWriter, req *http.Request) { | |||
| c := r.m.createContext(rw, req) | |||
| c.handlers = make([]Handler, 0, len(r.m.handlers)+len(handlers)) | |||
| @@ -265,11 +268,11 @@ func (r *Router) NotFound(handlers ...Handler) { | |||
| } | |||
| } | |||
| // Configurable handler which is called when route handler returns | |||
| // InternalServerError configures the handler which is called when a route handler returns an | |||
| // error. If it is not set, default handler is used. | |||
| // Be sure to set 500 response code in your handler. | |||
| func (r *Router) InternalServerError(handlers ...Handler) { | |||
| validateHandlers(handlers) | |||
| handlers = validateAndWrapHandlers(handlers) | |||
| r.internalServerError = func(c *Context, err error) { | |||
| c.index = 0 | |||
| c.handlers = handlers | |||
| @@ -278,9 +281,21 @@ func (r *Router) InternalServerError(handlers ...Handler) { | |||
| } | |||
| } | |||
| // SetHandlerWrapper sets handlerWrapper for the router. | |||
| func (r *Router) SetHandlerWrapper(f func(Handler) Handler) { | |||
| r.handlerWrapper = f | |||
| } | |||
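SetHandlerWrapper is the hook an application uses to teach the router about its own handler signatures so they can also bypass reflection. The sketch below is an assumption-laden illustration: AppContext and appInvoker are hypothetical, it presumes *Router is embedded in Macaron (so the method is reachable as m.SetHandlerWrapper), and an earlier middleware must map an *AppContext into the injector for the type assertion in Invoke to hold.

package main

import (
	"reflect"

	"gopkg.in/macaron.v1"
)

// AppContext is a hypothetical application-specific context.
type AppContext struct {
	*macaron.Context
}

// appInvoker is an inject.FastInvoker-style wrapper of func(*AppContext).
type appInvoker func(*AppContext)

func (i appInvoker) Invoke(params []interface{}) ([]reflect.Value, error) {
	i(params[0].(*AppContext))
	return nil, nil
}

func main() {
	m := macaron.New()
	// Map a request-scoped *AppContext so it can be injected later.
	m.Use(func(ctx *macaron.Context) {
		ctx.Map(&AppContext{Context: ctx})
	})
	// Turn plain func(*AppContext) handlers into fast invokers.
	m.SetHandlerWrapper(func(h macaron.Handler) macaron.Handler {
		if f, ok := h.(func(*AppContext)); ok {
			return appInvoker(f)
		}
		return h
	})
	m.Get("/", func(ctx *AppContext) {
		ctx.Resp.Write([]byte("ok"))
	})
	m.Run()
}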
| func (r *Router) ServeHTTP(rw http.ResponseWriter, req *http.Request) { | |||
| if t, ok := r.routers[req.Method]; ok { | |||
| h, p, ok := t.Match(req.URL.Path) | |||
| // Fast match for static routes | |||
| leaf := r.getLeaf(req.Method, req.URL.Path) | |||
| if leaf != nil { | |||
| leaf.handle(rw, req, nil) | |||
| return | |||
| } | |||
| h, p, ok := t.Match(req.URL.EscapedPath()) | |||
| if ok { | |||
| if splat, ok := p["*0"]; ok { | |||
| p["*"] = splat // Easy name. | |||
| @@ -261,6 +261,10 @@ func (t *Tree) Add(pattern string, handle Handle) *Leaf { | |||
| } | |||
| func (t *Tree) matchLeaf(globLevel int, url string, params Params) (Handle, bool) { | |||
| url, err := PathUnescape(url) | |||
| if err != nil { | |||
| return nil, false | |||
| } | |||
| for i := 0; i < len(t.leaves); i++ { | |||
| switch t.leaves[i].typ { | |||
| case _PATTERN_STATIC: | |||
| @@ -300,16 +304,20 @@ func (t *Tree) matchLeaf(globLevel int, url string, params Params) (Handle, bool | |||
| } | |||
| func (t *Tree) matchSubtree(globLevel int, segment, url string, params Params) (Handle, bool) { | |||
| unescapedSegment, err := PathUnescape(segment) | |||
| if err != nil { | |||
| return nil, false | |||
| } | |||
| for i := 0; i < len(t.subtrees); i++ { | |||
| switch t.subtrees[i].typ { | |||
| case _PATTERN_STATIC: | |||
| if t.subtrees[i].pattern == segment { | |||
| if t.subtrees[i].pattern == unescapedSegment { | |||
| if handle, ok := t.subtrees[i].matchNextSegment(globLevel, url, params); ok { | |||
| return handle, true | |||
| } | |||
| } | |||
| case _PATTERN_REGEXP: | |||
| results := t.subtrees[i].reg.FindStringSubmatch(segment) | |||
| results := t.subtrees[i].reg.FindStringSubmatch(unescapedSegment) | |||
| if len(results)-1 != len(t.subtrees[i].wildcards) { | |||
| break | |||
| } | |||
| @@ -322,12 +330,12 @@ func (t *Tree) matchSubtree(globLevel int, segment, url string, params Params) ( | |||
| } | |||
| case _PATTERN_HOLDER: | |||
| if handle, ok := t.subtrees[i].matchNextSegment(globLevel+1, url, params); ok { | |||
| params[t.subtrees[i].wildcards[0]] = segment | |||
| params[t.subtrees[i].wildcards[0]] = unescapedSegment | |||
| return handle, true | |||
| } | |||
| case _PATTERN_MATCH_ALL: | |||
| if handle, ok := t.subtrees[i].matchNextSegment(globLevel+1, url, params); ok { | |||
| params["*"+com.ToStr(globLevel)] = segment | |||
| params["*"+com.ToStr(globLevel)] = unescapedSegment | |||
| return handle, true | |||
| } | |||
| } | |||
| @@ -335,19 +343,22 @@ func (t *Tree) matchSubtree(globLevel int, segment, url string, params Params) ( | |||
| if len(t.leaves) > 0 { | |||
| leaf := t.leaves[len(t.leaves)-1] | |||
| unescapedURL, err := PathUnescape(segment + "/" + url) | |||
| if err != nil { | |||
| return nil, false | |||
| } | |||
| if leaf.typ == _PATTERN_PATH_EXT { | |||
| url = segment + "/" + url | |||
| j := strings.LastIndex(url, ".") | |||
| j := strings.LastIndex(unescapedURL, ".") | |||
| if j > -1 { | |||
| params[":path"] = url[:j] | |||
| params[":ext"] = url[j+1:] | |||
| params[":path"] = unescapedURL[:j] | |||
| params[":ext"] = unescapedURL[j+1:] | |||
| } else { | |||
| params[":path"] = url | |||
| params[":path"] = unescapedURL | |||
| } | |||
| return leaf.handle, true | |||
| } else if leaf.typ == _PATTERN_MATCH_ALL { | |||
| params["*"] = segment + "/" + url | |||
| params["*"+com.ToStr(globLevel)] = segment + "/" + url | |||
| params["*"] = unescapedURL | |||
| params["*"+com.ToStr(globLevel)] = unescapedURL | |||
| return leaf.handle, true | |||
| } | |||
| } | |||
| @@ -0,0 +1,25 @@ | |||
| // +build !go1.8 | |||
| // Copyright 2017 The Macaron Authors | |||
| // | |||
| // Licensed under the Apache License, Version 2.0 (the "License"): you may | |||
| // not use this file except in compliance with the License. You may obtain | |||
| // a copy of the License at | |||
| // | |||
| // http://www.apache.org/licenses/LICENSE-2.0 | |||
| // | |||
| // Unless required by applicable law or agreed to in writing, software | |||
| // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT | |||
| // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the | |||
| // License for the specific language governing permissions and limitations | |||
| // under the License. | |||
| package macaron | |||
| import "net/url" | |||
| // PathUnescape unescapes a path. Ideally, this function would use | |||
| // url.PathUnescape(..), but the function was not introduced until go1.8. | |||
| func PathUnescape(s string) (string, error) { | |||
| return url.QueryUnescape(s) | |||
| } | |||
| @@ -0,0 +1,24 @@ | |||
| // +build go1.8 | |||
| // Copyright 2017 The Macaron Authors | |||
| // | |||
| // Licensed under the Apache License, Version 2.0 (the "License"): you may | |||
| // not use this file except in compliance with the License. You may obtain | |||
| // a copy of the License at | |||
| // | |||
| // http://www.apache.org/licenses/LICENSE-2.0 | |||
| // | |||
| // Unless required by applicable law or agreed to in writing, software | |||
| // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT | |||
| // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the | |||
| // License for the specific language governing permissions and limitations | |||
| // under the License. | |||
| package macaron | |||
| import "net/url" | |||
| // PathUnescape unescapes a path. | |||
| func PathUnescape(s string) (string, error) { | |||
| return url.PathUnescape(s) | |||
| } | |||
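The user-visible effect of matching on EscapedPath and unescaping each segment is that percent-encoded URLs (wiki pages with spaces, for instance) resolve to the right route and hand the decoded value to the handler. A hedged sketch:

package main

import "gopkg.in/macaron.v1"

func main() {
	m := macaron.Classic()
	// GET /wiki/Hello%20World now matches and the param arrives decoded.
	m.Get("/wiki/:page", func(ctx *macaron.Context) string {
		return ctx.Params(":page") // "Hello World"
	})
	m.Run()
}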
| @@ -33,10 +33,10 @@ | |||
| "revisionTime": "2016-07-15T03:28:08Z" | |||
| }, | |||
| { | |||
| "checksumSHA1": "ly9VLPE9GKo2U7mnbZyjb2LDQ3w=", | |||
| "checksumSHA1": "IrtvVIFBTQmk0+vM7g2xtka5SFg=", | |||
| "path": "github.com/Unknwon/com", | |||
| "revision": "28b053d5a2923b87ce8c5a08f3af779894a72758", | |||
| "revisionTime": "2015-10-08T13:54:07Z" | |||
| "revision": "7677a1d7c1137cd3dd5ba7a076d0c898a1ef4520", | |||
| "revisionTime": "2017-08-19T22:39:52Z" | |||
| }, | |||
| { | |||
| "checksumSHA1": "GwPkXd1UL3D7F3IuHHM+V0r4MB4=", | |||
| @@ -1508,10 +1508,10 @@ | |||
| "revisionTime": "2016-08-08T14:54:09Z" | |||
| }, | |||
| { | |||
| "checksumSHA1": "u1dW5zfo2SWot04r5cL8dTbmtcc=", | |||
| "checksumSHA1": "VJKlO1AEWQivq2S4DvdmAJU2Fvs=", | |||
| "path": "gopkg.in/macaron.v1", | |||
| "revision": "aa6b7ee41a182898a33d798c655df1cac9d2230b", | |||
| "revisionTime": "2017-01-22T14:42:53Z" | |||
| "revision": "75f2e9b42e99652f0d82b28ccb73648f44615faa", | |||
| "revisionTime": "2017-11-24T00:20:17Z" | |||
| }, | |||
| { | |||
| "checksumSHA1": "6QPjE+qflEBHg+JPJd9e4iQuRAk=", | |||