//
// Regular Expression for URL validation
//
// Author: Diego Perini
// Created: 2010/12/05
// Updated: 2018/09/12
// License: MIT
//
// Copyright (c) 2010-2018 Diego Perini (http://www.iport.it)
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
//
// the regular expression composed & commented
// could easily be tweaked for RFC compliance,
// it was expressly modified to fit & satisfy
// these tests for a URL shortener:
//
// http://mathiasbynens.be/demo/url-regex
//
// Notes on possible differences from a standard/generic validation:
//
// - the utf-8 char class takes into consideration the full Unicode range
// - TLDs have been made mandatory, so single names like "localhost" fail
// - protocols have been restricted to ftp, http and https only, as requested
//
// Changes:
//
// - IP address dotted notation validation, range: 1.0.0.0 - 223.255.255.255
//   first and last IP address of each class is considered invalid
//   (since they are broadcast/network addresses)
//
// - Added exclusion of private, reserved and/or local networks ranges
// - Made starting path slash optional (http://example.com?foo=bar)
// - Allow a dot (.) at the end of hostnames (http://example.com.)
// - Allow an underscore (_) character in host/domain names
// - Check dot delimited parts length and total length
// - Made protocol optional, allowed short syntax //
//
// Compressed one-line versions:
//
// JavaScript regex version
//
// /^(?:(?:(?:https?|ftp):)?\/\/)(?:\S+(?::\S*)?@)?(?:(?!(?:10|127)(?:\.\d{1,3}){3})(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(?:(?:[a-z0-9\u00a1-\uffff][a-z0-9\u00a1-\uffff_-]{0,62})?[a-z0-9\u00a1-\uffff]\.)+(?:[a-z\u00a1-\uffff]{2,}\.?))(?::\d{2,5})?(?:[/?#]\S*)?$/i
//
// PHP version (uses % symbol as delimiter)
//
// %^(?:(?:(?:https?|ftp):)?\/\/)(?:\S+(?::\S*)?@)?(?:(?!(?:10|127)(?:\.\d{1,3}){3})(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(?:(?:[a-z0-9\x{00a1}-\x{ffff}][a-z0-9\x{00a1}-\x{ffff}_-]{0,62})?[a-z0-9\x{00a1}-\x{ffff}]\.)+(?:[a-z\x{00a1}-\x{ffff}]{2,}\.?))(?::\d{2,5})?(?:[/?#]\S*)?$%iuS
//
var re_weburl = new RegExp( | |
"^" + | |
// protocol identifier (optional) | |
// short syntax // still required | |
"(?:(?:(?:https?|ftp):)?\\/\\/)" + | |
// user:pass BasicAuth (optional) | |
"(?:\\S+(?::\\S*)?@)?" + | |
"(?:" + | |
// IP address exclusion | |
// private & local networks | |
"(?!(?:10|127)(?:\\.\\d{1,3}){3})" + | |
"(?!(?:169\\.254|192\\.168)(?:\\.\\d{1,3}){2})" + | |
"(?!172\\.(?:1[6-9]|2\\d|3[0-1])(?:\\.\\d{1,3}){2})" + | |
// IP address dotted notation octets | |
// excludes loopback network 0.0.0.0 | |
// excludes reserved space >= 224.0.0.0 | |
// excludes network & broadcast addresses | |
// (first & last IP address of each class) | |
"(?:[1-9]\\d?|1\\d\\d|2[01]\\d|22[0-3])" + | |
"(?:\\.(?:1?\\d{1,2}|2[0-4]\\d|25[0-5])){2}" + | |
"(?:\\.(?:[1-9]\\d?|1\\d\\d|2[0-4]\\d|25[0-4]))" + | |
"|" + | |
// host & domain names, may end with dot | |
// can be replaced by a shorter alternative
// (?![-_])(?:[-\\w\\u00a1-\\uffff]{0,63}[^-_]\\.)+ | |
"(?:" + | |
"(?:" + | |
"[a-z0-9\\u00a1-\\uffff]" + | |
"[a-z0-9\\u00a1-\\uffff_-]{0,62}" + | |
")?" + | |
"[a-z0-9\\u00a1-\\uffff]\\." + | |
")+" + | |
// TLD identifier name, may end with dot | |
"(?:[a-z\\u00a1-\\uffff]{2,}\\.?)" + | |
")" + | |
// port number (optional) | |
"(?::\\d{2,5})?" + | |
// resource path (optional) | |
"(?:[/?#]\\S*)?" + | |
"$", "i" | |
);
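
A few quick checks of the compiled re_weburl against the behaviours listed in the Changes section above (a sketch; the expected results follow from those notes):

re_weburl.test("http://example.com?foo=bar"); // true  - starting path slash is optional
re_weburl.test("http://example.com.");        // true  - trailing dot on the hostname is allowed
re_weburl.test("//example.com");              // true  - scheme is optional, short syntax // still requires "//"
re_weburl.test("http://10.0.0.1");            // false - private network range is excluded
re_weburl.test("http://localhost");           // false - TLDs are mandatory, single names fail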
@avalanche1,
a trailing dot in a domain name is perfectly valid and is accepted by all browsers, not just Chrome.
Also check out the other posts linked above by ddelange (there are more quirks about it).
There's a bug here: it doesn't match these URLs:
http://www.91xingche.com/img/qk[1].jpg
http://www.4j/images/iepng/iepngfix.htc
http://api.safebox.360.cn/Interface/getToolsDataApi/guid/{B7800CD1-75EE-41D3-9CFC-7E9B051B84AD}
@FANGOD,
only one does not match. https://regex101.com/r/dZBcOS/1
Previously discussed punycode TLDs aside, the hypothetical www.4j
above raises the question whether numbers in TLDs should be allowed by the regex (although there are currently none in the Public Suffix List)?
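For illustration, a small sketch of the three reported URLs run through re_weburl (results as per the regex101 link above):

re_weburl.test("http://www.91xingche.com/img/qk[1].jpg"); // true  - brackets are fine in the \S* path part
re_weburl.test("http://api.safebox.360.cn/Interface/getToolsDataApi/guid/{B7800CD1-75EE-41D3-9CFC-7E9B051B84AD}"); // true
re_weburl.test("http://www.4j/images/iepng/iepngfix.htc"); // false - "4j" contains a digit, so it never matches the TLD class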
Accepts a trailing period
http://x.comddfsdfsdf.
Up four comments: https://gist.github.com/dperini/729294#gistcomment-3623271
Regarding the trailing period: for plain TLDs that's totally fine. However, what about paths? Let's say you want to use this regex to link URLs in a given text like this one:
For further info about matching URLs, visit https://gist.github.com/dperini/729294.
This will produce a link to https://gist.github.com/dperini/729294. (including the dot at the end), which won't work.
Maybe one has to distinguish between a valid URL and a working URL.
The trailing dot is indeed perfectly valid as part of the path. How you extract URLs from the text before validating them is an entirely separate problem that this lib should not attempt to address.
Fair enough. You don’t happen to know of any lib to reliably extract URLs from texts?
I've ported this to PowerShell and I'm loving it! What an amazing validation script. If at all possible, is there an easy modification that would make the protocol optional, so URLs like the ones below are allowed:
www.google.com
github.com/PowerShell/PowerShell
4sysops.com/archives/
www.bing.com
I've tried commenting out the initial protocol identifier "(?:(?:(?:https?|ftp):)?\/\/)", but then URLs with protocols don't get matched/validated.
Any help would be awesome.
@visusys I'd recommend approaching it from the other direction – instead of adjusting the validator to allow invalid things, adjust your data. Check your own URLs and, if they don't start with https://, add it on before validating. If necessary for your situation, remove it again afterwards.
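
A minimal JavaScript sketch of that suggestion (the helper name validateLoose is hypothetical, not part of the gist):

function validateLoose(input) {
  // prepend a scheme when none is present, then run the unmodified validator
  var candidate = /^(?:https?|ftp):\/\//i.test(input) || input.indexOf("//") === 0
    ? input
    : "https://" + input;
  return re_weburl.test(candidate);
}

validateLoose("www.google.com");                   // true
validateLoose("github.com/PowerShell/PowerShell"); // true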
I actually got it all figured out. All I needed to do was add a question mark at the end of the protocol identifier's non-capturing group:
(?:(?:(?:https?|ftp):)?\/\/)?
For anyone interested, I also ported the entire thing to PowerShell:
https://gist.github.com/visusys/1647c1a17ecfd4c305bfbf86b652084f
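
For reference, the same tweak expressed against the JavaScript builder above (re_weburl_optional is a hypothetical name; this is only a sketch):

// make the whole protocol group optional by appending "?" to it
var re_weburl_optional = new RegExp(
  re_weburl.source.replace(
    "(?:(?:(?:https?|ftp):)?\\/\\/)",
    "(?:(?:(?:https?|ftp):)?\\/\\/)?"
  ),
  "i"
);

re_weburl_optional.test("www.bing.com"); // true - protocol no longer required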
How would one use this in a Postgres DB using POSIX matching? This is a little "above my pay grade", and I'm struggling with it.
try re_weburl.test('https://0.。。'). It returns true
@dperini
[\w\d.-@]+?(com|net|cn|org|asp|php)([/\w.?=]+)*/i
I suggest removing a character that causes the URL to be invalid: _.
/^(?:(?:(?:https?|ftp):)?\/\/)(?:\S+(?::\S*)?@)?(?:(?!(?:10|127)(?:\.\d{1,3}){3})(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(?:(?:[a-z0-9\u00a1-\uffff][a-z0-9\u00a1-\uffff-]{0,62})?[a-z0-9\u00a1-\uffff]\.)+(?:[a-z\u00a1-\uffff]{2,}\.?))(?::\d{2,5})?(?:[/?#]\S*)?$/i
really helpful, thx
Are these valid URLs? Because they are marked as invalid:
http://www.google.com/"asdf"
http://google.com//
http://google.com/asd//
No, according to the URL Standard, " is not a valid character in path segments (it is not a URL code point). Only the percent-encoded form %22 is valid.
However:
As far as I understand URL-path-segment-string, they should be valid:
zero or more URL units excluding U+002F (/) and U+003F (?), that together are not a single-dot path segment or a double-dot path segment
=> zero URL units (the empty string) seems to be a valid URL-path-segment-string according to the URL Standard, and therefore these two examples should be valid URLs
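
As a small aside (not part of the gist), percent-encoding produces the form the URL Standard considers valid for the quote character:

encodeURI('http://www.google.com/"asdf"');
// -> 'http://www.google.com/%22asdf%22'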
I suggest removing a character that causes the URL to be invalid: _.
/^(?:(?:(?:https?|ftp):)?\/\/)(?:\S+(?::\S*)?@)?(?:(?!(?:10|127)(?:\.\d{1,3}){3})(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(?:(?:[a-z0-9\u00a1-\uffff][a-z0-9\u00a1-\uffff-]{0,62})?[a-z0-9\u00a1-\uffff]\.)+(?:[a-z\u00a1-\uffff]{2,}\.?))(?::\d{2,5})?(?:[/?#]\S*)?$/i
This does not support spaces.
@dperini - regarding "(?::\d{2,5})?" for port testing: usually the port is at least two digits long, but technically the valid port range is 0-65535, so the rule should be {1,5} in this case. What are your thoughts?
@sakshivishnoi681
{1,5} would be precise in covering ports constructed from 1 to 5 digits.
However, {2,5} was written with a narrower scope, to cover real-world cases, where ports of just 1 digit are excluded.
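
A quick sketch of the difference in practice (expected results assume the unmodified {2,5} rule):

re_weburl.test("http://example.com:8080/"); // true  - 4-digit port accepted
re_weburl.test("http://example.com:8/");    // false - single-digit port rejected by {2,5}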
What about IPv6?
This fails:
http://[fe80::1] even though it is perfectly legal...
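
The regex has no IPv6 branch. One hedged workaround, separate from this gist, is to lean on the built-in WHATWG URL parser for bracketed IPv6 hosts (isLikelyUrl is a hypothetical helper):

function isLikelyUrl(input) {
  try {
    var u = new URL(input);
    // accept only the schemes this gist targets
    return u.protocol === "http:" || u.protocol === "https:" || u.protocol === "ftp:";
  } catch (e) {
    return false;
  }
}

isLikelyUrl("http://[fe80::1]"); // true - URL() understands IPv6 literals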
It doesn't match http://www.4j/images/iepng/iepngfix.webp; you can check it here.
Previously discussed punycode TLDs aside, the hypothetical
www.4j
above raises the question whether numbers in TLDs should be allowed by the regex (although there are currently none in the Public Suffix List)?
^ "number in TLD" has come up before: there are currently no TLDs containing digits in the PSL, meaning that you probably won't encounter it in the wild 👍
But IP addresses do have digits...
1.1.1.1 would beg to differ
Yeah, but you can also use one.one.one.one if you want a domain name instead of an IP.
@dperini, this incorrectly returns true for "https://goo.gl.".
Edit: Hmm... it seems that Chrome does accept that as a valid URL. How so?