summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJoeyHess <>2020-05-22 16:09:00 (GMT)
committerhdiff <hdiff@hdiff.luite.com>2020-05-22 16:09:00 (GMT)
commitd0b3d1b1090422456ce74c6f4301ce65d3d63669 (patch)
tree7bc8ceb4f5261da0b1dba399a2ecad82a4444d77
parentf7be44ecbe0ac0d86e13d450f6c97b292cde2918 (diff)
version 8.20200522HEAD8.20200522master
-rw-r--r--Annex/Content.hs18
-rw-r--r--Annex/Import.hs26
-rw-r--r--Annex/Ingest.hs8
-rw-r--r--Annex/UntrustedFilePath.hs64
-rw-r--r--Annex/Url.hs5
-rw-r--r--Backend.hs11
-rw-r--r--Backend/Hash.hs19
-rw-r--r--Backend/URL.hs2
-rw-r--r--Backend/WORM.hs6
-rw-r--r--CHANGELOG37
-rw-r--r--CmdLine/Seek.hs37
-rw-r--r--Command/AddUrl.hs85
-rw-r--r--Command/CalcKey.hs6
-rw-r--r--Command/Drop.hs2
-rw-r--r--Command/Export.hs25
-rw-r--r--Command/Fsck.hs20
-rw-r--r--Command/Get.hs2
-rw-r--r--Command/Import.hs6
-rw-r--r--Command/ImportFeed.hs3
-rw-r--r--Command/Migrate.hs6
-rw-r--r--Command/Move.hs6
-rw-r--r--Command/Reinject.hs16
-rw-r--r--Command/Sync.hs37
-rw-r--r--Command/TestRemote.hs60
-rw-r--r--Command/TransferKey.hs19
-rw-r--r--Command/TransferKeys.hs17
-rw-r--r--Command/Upgrade.hs7
-rw-r--r--Command/Whereis.hs74
-rw-r--r--Creds.hs10
-rw-r--r--Git/Repair.hs32
-rw-r--r--Remote.hs17
-rw-r--r--Remote/Adb.hs118
-rw-r--r--Remote/BitTorrent.hs34
-rw-r--r--Remote/Bup.hs22
-rw-r--r--Remote/Ddar.hs21
-rw-r--r--Remote/Directory.hs143
-rw-r--r--Remote/Directory/LegacyChunked.hs11
-rw-r--r--Remote/External.hs147
-rw-r--r--Remote/GCrypt.hs23
-rw-r--r--Remote/Git.hs153
-rw-r--r--Remote/GitLFS.hs74
-rw-r--r--Remote/Glacier.hs112
-rw-r--r--Remote/Helper/Chunked.hs89
-rw-r--r--Remote/Helper/Chunked/Legacy.hs21
-rw-r--r--Remote/Helper/ExportImport.hs82
-rw-r--r--Remote/Helper/Hooks.hs4
-rw-r--r--Remote/Helper/Http.hs2
-rw-r--r--Remote/Helper/P2P.hs34
-rw-r--r--Remote/Helper/ReadOnly.hs25
-rw-r--r--Remote/Helper/Special.hs138
-rw-r--r--Remote/Helper/Ssh.hs8
-rw-r--r--Remote/Hook.hs37
-rw-r--r--Remote/P2P.hs2
-rw-r--r--Remote/Rsync.hs64
-rw-r--r--Remote/S3.hs197
-rw-r--r--Remote/Tahoe.hs25
-rw-r--r--Remote/Web.hs44
-rw-r--r--Remote/WebDAV.hs170
-rw-r--r--Test.hs18
-rw-r--r--Test/Framework.hs5
-rw-r--r--Types/Backend.hs2
-rw-r--r--Types/Remote.hs72
-rw-r--r--Types/StoreRetrieve.hs13
-rw-r--r--Types/UrlContents.hs31
-rw-r--r--Upgrade/V5.hs6
-rw-r--r--Utility/CoProcess.hs2
-rw-r--r--Utility/Exception.hs2
-rw-r--r--Utility/Format.hs36
-rw-r--r--Utility/Path.hs22
-rw-r--r--Utility/Url.hs6
-rw-r--r--doc/git-annex-add.mdwn7
-rw-r--r--doc/git-annex-addurl.mdwn10
-rw-r--r--doc/git-annex-drop.mdwn13
-rw-r--r--doc/git-annex-get.mdwn10
-rw-r--r--doc/git-annex-init.mdwn9
-rw-r--r--doc/git-annex-lock.mdwn11
-rw-r--r--doc/git-annex-p2p.mdwn3
-rw-r--r--doc/git-annex-unlock.mdwn14
-rw-r--r--doc/git-annex-whereis.mdwn26
-rw-r--r--doc/git-annex.mdwn15
-rw-r--r--git-annex.cabal10
81 files changed, 1547 insertions, 1279 deletions
diff --git a/Annex/Content.hs b/Annex/Content.hs
index 3a48998..0e9fd9b 100644
--- a/Annex/Content.hs
+++ b/Annex/Content.hs
@@ -584,21 +584,19 @@ unlinkAnnex key = do
{- Runs an action to transfer an object's content.
-
- In some cases, it's possible for the file to change as it's being sent.
- - If this happens, runs the rollback action and returns False. The
- - rollback action should remove the data that was transferred.
+ - If this happens, runs the rollback action and throws an exception.
+ - The rollback action should remove the data that was transferred.
-}
-sendAnnex :: Key -> Annex () -> (FilePath -> Annex Bool) -> Annex Bool
+sendAnnex :: Key -> Annex () -> (FilePath -> Annex a) -> Annex a
sendAnnex key rollback sendobject = go =<< prepSendAnnex key
where
- go Nothing = return False
go (Just (f, checksuccess)) = do
r <- sendobject f
- ifM checksuccess
- ( return r
- , do
- rollback
- return False
- )
+ unlessM checksuccess $ do
+ rollback
+ giveup "content changed while it was being sent"
+ return r
+ go Nothing = giveup "content not available to send"
{- Returns a file that contains an object's content,
- and a check to run after the transfer is complete.
diff --git a/Annex/Import.hs b/Annex/Import.hs
index 9d21a27..e4d7d77 100644
--- a/Annex/Import.hs
+++ b/Annex/Import.hs
@@ -349,16 +349,20 @@ downloadImport remote importtreeconfig importablecontents = do
return (Right job)
download cidmap db (loc, (cid, sz)) = do
- let rundownload tmpfile p =
- Remote.retrieveExportWithContentIdentifier ia loc cid tmpfile (mkkey loc tmpfile) p >>= \case
- Just k -> tryNonAsync (moveAnnex k tmpfile) >>= \case
- Right True -> do
- recordcidkey cidmap db cid k
- logStatus k InfoPresent
- logChange k (Remote.uuid remote) InfoPresent
- return $ Just (loc, k)
- _ -> return Nothing
- Nothing -> return Nothing
+ let downloader tmpfile p = do
+ k <- Remote.retrieveExportWithContentIdentifier ia loc cid tmpfile (mkkey loc tmpfile) p
+ ok <- moveAnnex k tmpfile
+ return (k, ok)
+ let rundownload tmpfile p = tryNonAsync (downloader tmpfile p) >>= \case
+ Right (k, True) -> do
+ recordcidkey cidmap db cid k
+ logStatus k InfoPresent
+ logChange k (Remote.uuid remote) InfoPresent
+ return $ Just (loc, k)
+ Right (_, False) -> return Nothing
+ Left e -> do
+ warning (show e)
+ return Nothing
checkDiskSpaceToGet tmpkey Nothing $
withTmp tmpkey $ \tmpfile ->
metered Nothing tmpkey $
@@ -375,7 +379,7 @@ downloadImport remote importtreeconfig importablecontents = do
, contentLocation = toRawFilePath tmpfile
, inodeCache = Nothing
}
- fmap fst <$> genKey ks nullMeterUpdate backend
+ fst <$> genKey ks nullMeterUpdate backend
locworktreefilename loc = asTopFilePath $ case importtreeconfig of
ImportTree -> fromImportLocation loc
diff --git a/Annex/Ingest.hs b/Annex/Ingest.hs
index 2a1cf08..d265403 100644
--- a/Annex/Ingest.hs
+++ b/Annex/Ingest.hs
@@ -159,8 +159,8 @@ ingest' preferredbackend meterupdate (Just (LockedDown cfg source)) mk restage =
(chooseBackend $ fromRawFilePath $ keyFilename source)
(return . Just)
preferredbackend
- fmap fst <$> genKey source meterupdate backend
- Just k -> return (Just k)
+ fst <$> genKey source meterupdate backend
+ Just k -> return k
let src = contentLocation source
ms <- liftIO $ catchMaybeIO $ R.getFileStatus src
mcache <- maybe (pure Nothing) (liftIO . toInodeCache delta (fromRawFilePath src)) ms
@@ -169,10 +169,10 @@ ingest' preferredbackend meterupdate (Just (LockedDown cfg source)) mk restage =
(Just newc, Just c) | compareStrong c newc -> go k mcache ms
_ -> failure "changed while it was being added"
where
- go (Just key) mcache (Just s)
+ go key mcache (Just s)
| lockingFile cfg = golocked key mcache s
| otherwise = gounlocked key mcache s
- go _ _ _ = failure "failed to generate a key"
+ go _ _ Nothing = failure "failed to generate a key"
golocked key mcache s =
tryNonAsync (moveAnnex key $ fromRawFilePath $ contentLocation source) >>= \case
diff --git a/Annex/UntrustedFilePath.hs b/Annex/UntrustedFilePath.hs
new file mode 100644
index 0000000..2ec3784
--- /dev/null
+++ b/Annex/UntrustedFilePath.hs
@@ -0,0 +1,64 @@
+{- handling untrusted filepaths
+ -
+ - Copyright 2010-2020 Joey Hess <id@joeyh.name>
+ -
+ - Licensed under the GNU AGPL version 3 or higher.
+ -}
+
+module Annex.UntrustedFilePath where
+
+import Data.Char
+import System.FilePath
+
+{- Given a string that we'd like to use as the basis for FilePath, but that
+ - was provided by a third party and is not to be trusted, returns the closest
+ - sane FilePath.
+ -
+ - All spaces and punctuation and other wacky stuff are replaced
+ - with '_', except for '.' and '-'
+ -
+ - "../" becomes ".._", which is safe.
+ - "/foo" becomes "_foo", which is safe.
+ - "c:foo" becomes "c_foo", which is safe even on windows.
+ -
+ - Leading '.' and '-' are also replaced with '_', so
+ - so no dotfiles that might control a program are inadvertently created,
+ - and to avoid filenames being treated as options to commands the user
+ - might run.
+ -
+ - Also there's an off chance the string might be empty, so to avoid
+ - needing to handle such an invalid filepath, return a dummy "file" in
+ - that case.
+ -}
+sanitizeFilePath :: String -> FilePath
+sanitizeFilePath [] = "file"
+sanitizeFilePath f = leading (map sanitize f)
+ where
+ sanitize c
+ | c == '.' || c == '-' = c
+ | isSpace c || isPunctuation c || isSymbol c || isControl c || c == '/' = '_'
+ | otherwise = c
+
+ leading ('.':s) = '_':s
+ leading ('-':s) = '_':s
+ leading s = s
+
+escapeSequenceInFilePath :: FilePath -> Bool
+escapeSequenceInFilePath f = '\ESC' `elem` f
+
+{- ../ is a path traversal, no matter where it appears.
+ -
+ - An absolute path is, of course.
+ -}
+pathTraversalInFilePath :: FilePath -> Bool
+pathTraversalInFilePath f
+ | isAbsolute f = True
+ | any (== "..") (splitPath f) = True
+ -- On windows, C:foo with no directory is not considered absolute
+ | hasDrive f = True
+ | otherwise = False
+
+gitDirectoryInFilePath :: FilePath -> Bool
+gitDirectoryInFilePath = any (== ".git")
+ . map dropTrailingPathSeparator
+ . splitPath
diff --git a/Annex/Url.hs b/Annex/Url.hs
index fc8c543..77d48b0 100644
--- a/Annex/Url.hs
+++ b/Annex/Url.hs
@@ -14,6 +14,7 @@ module Annex.Url (
ipAddressesUnlimited,
checkBoth,
download,
+ download',
exists,
getUrlInfo,
U.downloadQuiet,
@@ -172,6 +173,10 @@ download meterupdate url file uo =
Right () -> return True
Left err -> warning err >> return False
+download' :: MeterUpdate -> U.URLString -> FilePath -> U.UrlOptions -> Annex (Either String ())
+download' meterupdate url file uo =
+ liftIO (U.download meterupdate url file uo)
+
exists :: U.URLString -> U.UrlOptions -> Annex Bool
exists url uo = liftIO (U.exists url uo) >>= \case
Right b -> return b
diff --git a/Backend.hs b/Backend.hs
index 9a0abf7..2ca5aef 100644
--- a/Backend.hs
+++ b/Backend.hs
@@ -51,12 +51,15 @@ defaultBackend = maybe cache return =<< Annex.getState Annex.backend
lookupname = lookupBackendVariety . parseKeyVariety . encodeBS
{- Generates a key for a file. -}
-genKey :: KeySource -> MeterUpdate -> Maybe Backend -> Annex (Maybe (Key, Backend))
+genKey :: KeySource -> MeterUpdate -> Maybe Backend -> Annex (Key, Backend)
genKey source meterupdate preferredbackend = do
b <- maybe defaultBackend return preferredbackend
- B.getKey b source meterupdate >>= return . \case
- Nothing -> Nothing
- Just k -> Just (makesane k, b)
+ case B.getKey b of
+ Just a -> do
+ k <- a source meterupdate
+ return (makesane k, b)
+ Nothing -> giveup $ "Cannot generate a key for backend " ++
+ decodeBS (formatKeyVariety (B.backendVariety b))
where
-- keyNames should not contain newline characters.
makesane k = alterKey k $ \d -> d
diff --git a/Backend/Hash.hs b/Backend/Hash.hs
index 9ac00f8..bef3bbb 100644
--- a/Backend/Hash.hs
+++ b/Backend/Hash.hs
@@ -63,7 +63,7 @@ backends = concatMap (\h -> [genBackendE h, genBackend h]) hashes
genBackend :: Hash -> Backend
genBackend hash = Backend
{ backendVariety = hashKeyVariety hash (HasExt False)
- , getKey = keyValue hash
+ , getKey = Just (keyValue hash)
, verifyKeyContent = Just $ checkKeyChecksum hash
, canUpgradeKey = Just needsUpgrade
, fastMigrate = Just trivialMigrate
@@ -73,7 +73,7 @@ genBackend hash = Backend
genBackendE :: Hash -> Backend
genBackendE hash = (genBackend hash)
{ backendVariety = hashKeyVariety hash (HasExt True)
- , getKey = keyValueE hash
+ , getKey = Just (keyValueE hash)
}
hashKeyVariety :: Hash -> HasExt -> KeyVariety
@@ -88,26 +88,26 @@ hashKeyVariety (Blake2sHash size) he = Blake2sKey size he
hashKeyVariety (Blake2spHash size) he = Blake2spKey size he
{- A key is a hash of its contents. -}
-keyValue :: Hash -> KeySource -> MeterUpdate -> Annex (Maybe Key)
+keyValue :: Hash -> KeySource -> MeterUpdate -> Annex Key
keyValue hash source meterupdate = do
let file = fromRawFilePath (contentLocation source)
filesize <- liftIO $ getFileSize file
s <- hashFile hash file meterupdate
- return $ Just $ mkKey $ \k -> k
+ return $ mkKey $ \k -> k
{ keyName = encodeBS s
, keyVariety = hashKeyVariety hash (HasExt False)
, keySize = Just filesize
}
{- Extension preserving keys. -}
-keyValueE :: Hash -> KeySource -> MeterUpdate -> Annex (Maybe Key)
+keyValueE :: Hash -> KeySource -> MeterUpdate -> Annex Key
keyValueE hash source meterupdate =
- keyValue hash source meterupdate >>= maybe (return Nothing) addE
+ keyValue hash source meterupdate >>= addE
where
addE k = do
maxlen <- annexMaxExtensionLength <$> Annex.getGitConfig
let ext = selectExtension maxlen (keyFilename source)
- return $ Just $ alterKey k $ \d -> d
+ return $ alterKey k $ \d -> d
{ keyName = keyName d <> ext
, keyVariety = hashKeyVariety hash (HasExt True)
}
@@ -296,7 +296,10 @@ md5Hasher = show . md5
testKeyBackend :: Backend
testKeyBackend =
let b = genBackendE (SHA2Hash (HashSize 256))
- in b { getKey = \ks p -> (fmap addE) <$> getKey b ks p }
+ gk = case getKey b of
+ Nothing -> Nothing
+ Just f -> Just (\ks p -> addE <$> f ks p)
+ in b { getKey = gk }
where
addE k = alterKey k $ \d -> d
{ keyName = keyName d <> longext
diff --git a/Backend/URL.hs b/Backend/URL.hs
index 7e6313d..ae178f8 100644
--- a/Backend/URL.hs
+++ b/Backend/URL.hs
@@ -21,7 +21,7 @@ backends = [backend]
backend :: Backend
backend = Backend
{ backendVariety = URLKey
- , getKey = \_ _ -> return Nothing
+ , getKey = Nothing
, verifyKeyContent = Nothing
, canUpgradeKey = Nothing
, fastMigrate = Nothing
diff --git a/Backend/WORM.hs b/Backend/WORM.hs
index e141e1c..eaf9a9b 100644
--- a/Backend/WORM.hs
+++ b/Backend/WORM.hs
@@ -24,7 +24,7 @@ backends = [backend]
backend :: Backend
backend = Backend
{ backendVariety = WORMKey
- , getKey = keyValue
+ , getKey = Just keyValue
, verifyKeyContent = Nothing
, canUpgradeKey = Just needsUpgrade
, fastMigrate = Just removeSpaces
@@ -34,14 +34,14 @@ backend = Backend
{- The key includes the file size, modification time, and the
- original filename relative to the top of the git repository.
-}
-keyValue :: KeySource -> MeterUpdate -> Annex (Maybe Key)
+keyValue :: KeySource -> MeterUpdate -> Annex Key
keyValue source _ = do
let f = contentLocation source
stat <- liftIO $ R.getFileStatus f
sz <- liftIO $ getFileSize' (fromRawFilePath f) stat
relf <- fromRawFilePath . getTopFilePath
<$> inRepo (toTopFilePath $ keyFilename source)
- return $ Just $ mkKey $ \k -> k
+ return $ mkKey $ \k -> k
{ keyName = genKeyName relf
, keyVariety = WORMKey
, keySize = Just sz
diff --git a/CHANGELOG b/CHANGELOG
index e9a1d4a..60e80c3 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,3 +1,40 @@
+git-annex (8.20200522) upstream; urgency=medium
+
+ * Fix bug that made enableremote of S3 and webdav remotes, that
+ have embedcreds=yes, fail to set up the embedded creds, so accessing
+ the remotes failed. (Regression introduced in version 7.20200202.7)
+ * addurl, importfeed: Avoid adding filenames with leading '.', instead
+ it will be replaced with '_'.
+ * addurl, importfeed: Allow '-' in filenames, as long as it's not the
+ first character.
+ * addurl --preserve-filename: New option, uses server-provided filename
+ without any sanitization, but will fail if the filename has an obvious
+ security problem like using an escape sequence or trying to escape
+ the current directory.
+ * whereis: Added --format option.
+ * S3: Support signature=v4, to use S3 Signature Version 4.
+ Some S3 services seem to require v4, while others may only
+ support v2, which remains the default.
+ * upgrade: When upgrade fails due to an exception, display it.
+ * repair: Improve fetching from a remote with an url in host:path format.
+ * git-lfs repos that encrypt the annexed content but not the git repo
+ only need --force passed to initremote, allow enableremote and
+ autoenable of such remotes without forcing again.
+ * When accessing a remote fails, always display a reason why.
+ * Display a warning message when a remote uses a protocol, such as
+ git://, that git-annex does not support. Silently skipping such a
+ remote was confusing behavior.
+ * Also display a warning message when a remote, without a known uuid,
+ is located in a directory that does not currently exist, to avoid
+ silently skipping such a remote.
+ * sync: Avoid an ugly error message when nothing has been committed to
+ master yet and there is a synced master branch to merge from.
+ * Display a warning message when asked to operate on a file inside a
+ directory that's a symbolic link to elsewhere.
+ * Support building with tasty-1.3.
+
+ -- Joey Hess <id@joeyh.name> Fri, 22 May 2020 12:05:01 -0400
+
git-annex (8.20200501) upstream; urgency=medium
* Improve git-annex's ability to find the path to its program,
diff --git a/CmdLine/Seek.hs b/CmdLine/Seek.hs
index 6f035fd..77c3bd0 100644
--- a/CmdLine/Seek.hs
+++ b/CmdLine/Seek.hs
@@ -243,28 +243,47 @@ newtype WorkTreeItem = WorkTreeItem FilePath
-- seeking for such files.
newtype AllowHidden = AllowHidden Bool
--- Many git commands seek work tree items matching some criteria,
+-- Many git commands like ls-files seek work tree items matching some criteria,
-- and silently skip over anything that does not exist. But users expect
-- an error message when one of the files they provided as a command-line
-- parameter doesn't exist, so this checks that each exists.
+--
+-- Also, when two directories are symlinked, referring to a file
+-- inside the symlinked directory will be silently skipped by git commands
+-- like ls-files. But, the user would be surprised for it to be skipped, so
+-- check if the parent directories are symlinks.
workTreeItems :: CmdParams -> Annex [WorkTreeItem]
workTreeItems = workTreeItems' (AllowHidden False)
workTreeItems' :: AllowHidden -> CmdParams -> Annex [WorkTreeItem]
workTreeItems' (AllowHidden allowhidden) ps = do
currbranch <- getCurrentBranch
- forM_ ps $ \p ->
- unlessM (exists p <||> hidden currbranch p) $ do
- toplevelWarning False (p ++ " not found")
- Annex.incError
+ forM_ ps $ \p -> do
+ relf <- liftIO $ relPathCwdToFile p
+ ifM (not <$> (exists p <||> hidden currbranch relf))
+ ( prob (p ++ " not found")
+ , whenM (viasymlink (upFrom relf)) $
+ prob (p ++ " is beyond a symbolic link")
+ )
return (map (WorkTreeItem) ps)
where
exists p = isJust <$> liftIO (catchMaybeIO $ getSymbolicLinkStatus p)
- hidden currbranch p
- | allowhidden = do
- f <- liftIO $ relPathCwdToFile p
- isJust <$> catObjectMetaDataHidden (toRawFilePath f) currbranch
+
+ viasymlink Nothing = return False
+ viasymlink (Just p) =
+ ifM (liftIO $ isSymbolicLink <$> getSymbolicLinkStatus p)
+ ( return True
+ , viasymlink (upFrom p)
+ )
+
+ hidden currbranch f
+ | allowhidden = isJust
+ <$> catObjectMetaDataHidden (toRawFilePath f) currbranch
| otherwise = return False
+ prob msg = do
+ toplevelWarning False msg
+ Annex.incError
+
notSymlink :: RawFilePath -> IO Bool
notSymlink f = liftIO $ not . isSymbolicLink <$> R.getSymbolicLinkStatus f
diff --git a/Command/AddUrl.hs b/Command/AddUrl.hs
index 9097b7f..4341aca 100644
--- a/Command/AddUrl.hs
+++ b/Command/AddUrl.hs
@@ -1,6 +1,6 @@
{- git-annex command
-
- - Copyright 2011-2017 Joey Hess <id@joeyh.name>
+ - Copyright 2011-2020 Joey Hess <id@joeyh.name>
-
- Licensed under the GNU AGPL version 3 or higher.
-}
@@ -23,6 +23,7 @@ import Annex.CheckIgnore
import Annex.Perms
import Annex.UUID
import Annex.YoutubeDl
+import Annex.UntrustedFilePath
import Logs.Web
import Types.KeySource
import Types.UrlContents
@@ -52,6 +53,7 @@ data DownloadOptions = DownloadOptions
{ relaxedOption :: Bool
, rawOption :: Bool
, fileOption :: Maybe FilePath
+ , preserveFilenameOption :: Bool
}
optParser :: CmdParamsDesc -> Parser AddUrlOptions
@@ -77,7 +79,7 @@ optParser desc = AddUrlOptions
)
parseDownloadOptions :: Bool -> Parser DownloadOptions
-parseDownloadOptions withfileoption = DownloadOptions
+parseDownloadOptions withfileoptions = DownloadOptions
<$> switch
( long "relaxed"
<> help "skip size check"
@@ -86,12 +88,18 @@ parseDownloadOptions withfileoption = DownloadOptions
( long "raw"
<> help "disable special handling for torrents, youtube-dl, etc"
)
- <*> if withfileoption
+ <*> (if withfileoptions
then optional (strOption
( long "file" <> metavar paramFile
<> help "specify what file the url is added to"
))
- else pure Nothing
+ else pure Nothing)
+ <*> (if withfileoptions
+ then switch
+ ( long "preserve-filename"
+ <> help "use filename provided by server as-is"
+ )
+ else pure False)
seek :: AddUrlOptions -> CommandSeek
seek o = startConcurrency commandStages $ do
@@ -129,13 +137,15 @@ checkUrl addunlockedmatcher r o u = do
warning (show e)
next $ return False
go deffile (Right (UrlContents sz mf)) = do
- let f = adjustFile o (fromMaybe (maybe deffile fromSafeFilePath mf) (fileOption (downloadOptions o)))
- void $ commandAction $ startRemote addunlockedmatcher r o f u sz
+ f <- maybe (pure deffile) (sanitizeOrPreserveFilePath o) mf
+ let f' = adjustFile o (fromMaybe f (fileOption (downloadOptions o)))
+ void $ commandAction $ startRemote addunlockedmatcher r o f' u sz
go deffile (Right (UrlMulti l)) = case fileOption (downloadOptions o) of
Nothing ->
forM_ l $ \(u', sz, f) -> do
- let f' = adjustFile o (deffile </> fromSafeFilePath f)
- void $ commandAction $ startRemote addunlockedmatcher r o f' u' sz
+ f' <- sanitizeOrPreserveFilePath o f
+ let f'' = adjustFile o (deffile </> sanitizeFilePath f')
+ void $ commandAction $ startRemote addunlockedmatcher r o f'' u' sz
Just f -> case l of
[] -> noop
((u',sz,_):[]) -> do
@@ -179,15 +189,15 @@ downloadRemoteFile addunlockedmatcher r o uri file sz = checkCanAdd file $ do
-- so that the remote knows what url it
-- should use to download it.
setTempUrl urlkey loguri
- let downloader = \dest p -> fst
- <$> Remote.retrieveKeyFile r urlkey
- (AssociatedFile (Just (toRawFilePath file))) dest p
+ let downloader = \dest p ->
+ fst <$> Remote.verifiedAction (Remote.retrieveKeyFile r urlkey af dest p)
ret <- downloadWith addunlockedmatcher downloader urlkey (Remote.uuid r) loguri file
removeTempUrl urlkey
return ret
)
where
loguri = setDownloader uri OtherDownloader
+ af = AssociatedFile (Just (toRawFilePath file))
startWeb :: AddUnlockedMatcher -> AddUrlOptions -> URLString -> CommandStart
startWeb addunlockedmatcher o urlstring = go $ fromMaybe bad $ parseURI urlstring
@@ -207,16 +217,41 @@ startWeb addunlockedmatcher o urlstring = go $ fromMaybe bad $ parseURI urlstrin
file <- adjustFile o <$> case fileOption (downloadOptions o) of
Just f -> pure f
Nothing -> case Url.urlSuggestedFile urlinfo of
- Nothing -> pure $ url2file url (pathdepthOption o) pathmax
Just sf -> do
- let f = truncateFilePath pathmax $
- sanitizeFilePath sf
- ifM (liftIO $ doesFileExist f <||> doesDirectoryExist f)
- ( pure $ url2file url (pathdepthOption o) pathmax
- , pure f
- )
+ f <- sanitizeOrPreserveFilePath o sf
+ if preserveFilenameOption (downloadOptions o)
+ then pure f
+ else ifM (liftIO $ doesFileExist f <||> doesDirectoryExist f)
+ ( pure $ url2file url (pathdepthOption o) pathmax
+ , pure f
+ )
+ _ -> pure $ url2file url (pathdepthOption o) pathmax
performWeb addunlockedmatcher o urlstring file urlinfo
+sanitizeOrPreserveFilePath :: AddUrlOptions -> FilePath -> Annex FilePath
+sanitizeOrPreserveFilePath o f
+ | preserveFilenameOption (downloadOptions o) && not (null f) = do
+ checkPreserveFileNameSecurity f
+ return f
+ | otherwise = do
+ pathmax <- liftIO $ fileNameLengthLimit "."
+ return $ truncateFilePath pathmax $ sanitizeFilePath f
+
+-- sanitizeFilePath avoids all these security problems
+-- (and probably others, but at least this catches the most egrarious ones).
+checkPreserveFileNameSecurity :: FilePath -> Annex ()
+checkPreserveFileNameSecurity f = do
+ checksecurity escapeSequenceInFilePath False "escape sequence"
+ checksecurity pathTraversalInFilePath True "path traversal"
+ checksecurity gitDirectoryInFilePath True "contains a .git directory"
+ where
+ checksecurity p canshow d = when (p f) $
+ giveup $ concat
+ [ "--preserve-filename was used, but the filename "
+ , if canshow then "(" ++ f ++ ") " else ""
+ , "has a security problem (" ++ d ++ "), not adding."
+ ]
+
performWeb :: AddUnlockedMatcher -> AddUrlOptions -> URLString -> FilePath -> Url.UrlInfo -> CommandPerform
performWeb addunlockedmatcher o url file urlinfo = ifAnnexed (toRawFilePath file) addurl geturl
where
@@ -278,7 +313,7 @@ downloadWeb addunlockedmatcher o url urlinfo file =
normalfinish tmp = checkCanAdd file $ do
showDestinationFile file
createWorkTreeDirectory (parentDir file)
- finishDownloadWith addunlockedmatcher tmp webUUID url file
+ Just <$> finishDownloadWith addunlockedmatcher tmp webUUID url file
tryyoutubedl tmp
-- Ask youtube-dl what filename it will download
-- first, and check if that is already an annexed file,
@@ -353,7 +388,7 @@ downloadWith addunlockedmatcher downloader dummykey u url file =
where
afile = AssociatedFile (Just (toRawFilePath file))
go Nothing = return Nothing
- go (Just tmp) = finishDownloadWith addunlockedmatcher tmp u url file
+ go (Just tmp) = Just <$> finishDownloadWith addunlockedmatcher tmp u url file
{- Like downloadWith, but leaves the dummy key content in
- the returned location. -}
@@ -369,7 +404,7 @@ downloadWith' downloader dummykey u url afile =
then return (Just tmp)
else return Nothing
-finishDownloadWith :: AddUnlockedMatcher -> FilePath -> UUID -> URLString -> FilePath -> Annex (Maybe Key)
+finishDownloadWith :: AddUnlockedMatcher -> FilePath -> UUID -> URLString -> FilePath -> Annex Key
finishDownloadWith addunlockedmatcher tmp u url file = do
backend <- chooseBackend file
let source = KeySource
@@ -377,11 +412,9 @@ finishDownloadWith addunlockedmatcher tmp u url file = do
, contentLocation = toRawFilePath tmp
, inodeCache = Nothing
}
- genKey source nullMeterUpdate backend >>= \case
- Nothing -> return Nothing
- Just (key, _) -> do
- addWorkTree addunlockedmatcher u url file key (Just tmp)
- return (Just key)
+ key <- fst <$> genKey source nullMeterUpdate backend
+ addWorkTree addunlockedmatcher u url file key (Just tmp)
+ return key
{- Adds the url size to the Key. -}
addSizeUrlKey :: Url.UrlInfo -> Key -> Key
diff --git a/Command/CalcKey.hs b/Command/CalcKey.hs
index 781c8b8..c1c5016 100644
--- a/Command/CalcKey.hs
+++ b/Command/CalcKey.hs
@@ -20,11 +20,11 @@ cmd = noCommit $ noMessages $ dontCheck repoExists $
(batchable run (pure ()))
run :: () -> String -> Annex Bool
-run _ file = genKey ks nullMeterUpdate Nothing >>= \case
- Just (k, _) -> do
+run _ file = tryNonAsync (genKey ks nullMeterUpdate Nothing) >>= \case
+ Right (k, _) -> do
liftIO $ putStrLn $ serializeKey k
return True
- Nothing -> return False
+ Left _err -> return False
where
ks = KeySource file' file' Nothing
file' = toRawFilePath file
diff --git a/Command/Drop.hs b/Command/Drop.hs
index 9b8c471..785b491 100644
--- a/Command/Drop.hs
+++ b/Command/Drop.hs
@@ -131,7 +131,7 @@ performRemote key afile numcopies remote = do
, "proof:"
, show proof
]
- ok <- Remote.removeKey remote key
+ ok <- Remote.action (Remote.removeKey remote key)
next $ cleanupRemote key remote ok
, stop
)
diff --git a/Command/Export.hs b/Command/Export.hs
index 97c9014..05158cf 100644
--- a/Command/Export.hs
+++ b/Command/Export.hs
@@ -20,6 +20,7 @@ import qualified Git.Ref
import Git.Types
import Git.FilePath
import Git.Sha
+import qualified Remote
import Types.Remote
import Types.Export
import Annex.Export
@@ -280,7 +281,8 @@ performExport r db ek af contentsha loc allfilledvar = do
let rollback = void $
performUnexport r db [ek] loc
sendAnnex k rollback $ \f ->
- storer f k loc pm
+ Remote.action $
+ storer f k loc pm
, do
showNote "not available"
return False
@@ -291,7 +293,8 @@ performExport r db ek af contentsha loc allfilledvar = do
b <- catObject contentsha
liftIO $ L.hPut h b
liftIO $ hClose h
- storer tmp sha1k loc nullMeterUpdate
+ Remote.action $
+ storer tmp sha1k loc nullMeterUpdate
let failedsend = liftIO $ modifyMVar_ allfilledvar (pure . const (AllFilled False))
case sent of
Right True -> next $ cleanupExport r db ek loc True
@@ -334,10 +337,12 @@ startUnexport' r db f ek = starting ("unexport " ++ name r) (ActionItemOther (Ju
-- not really remove the content, which must be accessible later on.
performUnexport :: Remote -> ExportHandle -> [ExportKey] -> ExportLocation -> CommandPerform
performUnexport r db eks loc = do
- ifM (allM (\ek -> removeExport (exportActions r) (asKey ek) loc) eks)
+ ifM (allM rm eks)
( next $ cleanupUnexport r db eks loc
, stop
)
+ where
+ rm ek = Remote.action $ removeExport (exportActions r) (asKey ek) loc
cleanupUnexport :: Remote -> ExportHandle -> [ExportKey] -> ExportLocation -> CommandCleanup
cleanupUnexport r db eks loc = do
@@ -391,13 +396,13 @@ startMoveFromTempName r db ek f = do
performRename :: Remote -> ExportHandle -> ExportKey -> ExportLocation -> ExportLocation -> CommandPerform
performRename r db ek src dest =
- renameExport (exportActions r) (asKey ek) src dest >>= \case
- Just True -> next $ cleanupRename r db ek src dest
- Just False -> do
- warning "rename failed; deleting instead"
+ tryNonAsync (renameExport (exportActions r) (asKey ek) src dest) >>= \case
+ Right (Just ()) -> next $ cleanupRename r db ek src dest
+ Left err -> do
+ warning $ "rename failed (" ++ show err ++ "); deleting instead"
fallbackdelete
- -- Remote does not support renaming, so don't warn about it.
- Nothing -> fallbackdelete
+ -- remote does not support renaming
+ Right Nothing -> fallbackdelete
where
fallbackdelete = performUnexport r db [ek] src
@@ -432,7 +437,7 @@ removeEmptyDirectories r db loc ks
where
go removeexportdirectory d =
ifM (liftIO $ isExportDirectoryEmpty db d)
- ( removeexportdirectory d
+ ( Remote.action $ removeexportdirectory d
, return True
)
diff --git a/Command/Fsck.hs b/Command/Fsck.hs
index 434e8b9..930162d 100644
--- a/Command/Fsck.hs
+++ b/Command/Fsck.hs
@@ -41,6 +41,7 @@ import Data.Time.Clock.POSIX
import System.Posix.Types (EpochTime)
import qualified Data.Set as S
import qualified Data.Map as M
+import Data.Either
cmd :: Command
cmd = withGlobalOptions [jobsOption, jsonOptions, annexedMatchingOptions] $
@@ -174,17 +175,20 @@ performRemote key afile backend numcopies remote =
cleanup
cleanup `after` a tmp
getfile tmp = ifM (checkDiskSpace (Just (takeDirectory tmp)) key 0 True)
- ( ifM (Remote.retrieveKeyFileCheap remote key afile tmp)
+ ( ifM (getcheap tmp)
( return (Just True)
, ifM (Annex.getState Annex.fast)
( return Nothing
- , Just . fst <$>
- Remote.retrieveKeyFile remote key (AssociatedFile Nothing) tmp dummymeter
+ , Just . isRight <$> tryNonAsync (getfile' tmp)
)
)
, return (Just False)
)
+ getfile' tmp = Remote.retrieveKeyFile remote key (AssociatedFile Nothing) tmp dummymeter
dummymeter _ = noop
+ getcheap tmp = case Remote.retrieveKeyFileCheap remote of
+ Just a -> isRight <$> tryNonAsync (a key afile tmp)
+ Nothing -> return False
startKey :: Maybe Remote -> Incremental -> (Key, ActionItem) -> NumCopies -> CommandStart
startKey from inc (key, ai) numcopies =
@@ -535,14 +539,14 @@ badContentRemote remote localcopy key = do
)
)
- dropped <- Remote.removeKey remote key
- when dropped $
+ dropped <- tryNonAsync (Remote.removeKey remote key)
+ when (isRight dropped) $
Remote.logStatus remote key InfoMissing
return $ case (movedbad, dropped) of
- (True, True) -> "moved from " ++ Remote.name remote ++
+ (True, Right ()) -> "moved from " ++ Remote.name remote ++
" to " ++ destbad
- (False, True) -> "dropped from " ++ Remote.name remote
- (_, False) -> "failed to drop from" ++ Remote.name remote
+ (False, Right ()) -> "dropped from " ++ Remote.name remote
+ (_, Left e) -> "failed to drop from" ++ Remote.name remote ++ ": " ++ show e
runFsck :: Incremental -> ActionItem -> Key -> Annex Bool -> CommandStart
runFsck inc ai key a = stopUnless (needFsck inc key) $
diff --git a/Command/Get.hs b/Command/Get.hs
index e3bf47c..16d3693 100644
--- a/Command/Get.hs
+++ b/Command/Get.hs
@@ -112,5 +112,5 @@ getKey' key afile = dispatch
download (Remote.uuid r) key afile stdRetry
(\p -> do
showAction $ "from " ++ Remote.name r
- Remote.retrieveKeyFile r key afile dest p
+ Remote.verifiedAction (Remote.retrieveKeyFile r key afile dest p)
) witness
diff --git a/Command/Import.hs b/Command/Import.hs
index 5d1c87c..825c2ec 100644
--- a/Command/Import.hs
+++ b/Command/Import.hs
@@ -227,10 +227,8 @@ startLocal addunlockedmatcher largematcher mode (srcfile, destfile) =
case v of
Just ld -> do
backend <- chooseBackend destfile
- v' <- genKey (keySource ld) nullMeterUpdate backend
- case v' of
- Just (k, _) -> a (ld, k)
- Nothing -> giveup "failed to generate a key"
+ k <- fst <$> genKey (keySource ld) nullMeterUpdate backend
+ a (ld, k)
Nothing -> stop
checkdup k dupa notdupa = ifM (isKnownKey k)
( dupa
diff --git a/Command/ImportFeed.hs b/Command/ImportFeed.hs
index 6b67066..6eb679e 100644
--- a/Command/ImportFeed.hs
+++ b/Command/ImportFeed.hs
@@ -40,6 +40,7 @@ import Logs.MetaData
import Annex.MetaData
import Annex.FileMatcher
import Command.AddUrl (addWorkTree)
+import Annex.UntrustedFilePath
cmd :: Command
cmd = notBareRepo $
@@ -189,7 +190,7 @@ performDownload addunlockedmatcher opts cache todownload = case location todownl
downloadRemoteFile addunlockedmatcher r (downloadOptions opts) url f sz
Right (UrlMulti l) -> do
kl <- forM l $ \(url', sz, subf) ->
- downloadRemoteFile addunlockedmatcher r (downloadOptions opts) url' (f </> fromSafeFilePath subf) sz
+ downloadRemoteFile addunlockedmatcher r (downloadOptions opts) url' (f </> sanitizeFilePath subf) sz
return $ if all isJust kl
then catMaybes kl
else []
diff --git a/Command/Migrate.hs b/Command/Migrate.hs
index 8464686..76e69ae 100644
--- a/Command/Migrate.hs
+++ b/Command/Migrate.hs
@@ -89,10 +89,8 @@ perform file oldkey oldbackend newbackend = go =<< genkey (fastMigrate oldbacken
, contentLocation = content
, inodeCache = Nothing
}
- v <- genKey source nullMeterUpdate (Just newbackend)
- return $ case v of
- Just (newkey, _) -> Just (newkey, False)
- _ -> Nothing
+ newkey <- fst <$> genKey source nullMeterUpdate (Just newbackend)
+ return $ Just (newkey, False)
genkey (Just fm) = fm oldkey newbackend afile >>= \case
Just newkey -> return (Just (newkey, True))
Nothing -> genkey Nothing
diff --git a/Command/Move.hs b/Command/Move.hs
index fd06ef0..c33f92d 100644
--- a/Command/Move.hs
+++ b/Command/Move.hs
@@ -127,7 +127,7 @@ toPerform dest removewhen key afile fastcheck isthere =
showAction $ "to " ++ Remote.name dest
ok <- notifyTransfer Upload afile $
upload (Remote.uuid dest) key afile stdRetry $
- Remote.storeKey dest key afile
+ Remote.action . Remote.storeKey dest key afile
if ok
then finish False $
Remote.logStatus dest key InfoPresent
@@ -203,7 +203,7 @@ fromPerform src removewhen key afile = do
go = notifyTransfer Download afile $
download (Remote.uuid src) key afile stdRetry $ \p ->
getViaTmp (Remote.retrievalSecurityPolicy src) (RemoteVerify src) key $ \t ->
- Remote.retrieveKeyFile src key afile t p
+ Remote.verifiedAction $ Remote.retrieveKeyFile src key afile t p
dispatch _ _ False = stop -- failed
dispatch RemoveNever _ True = next $ return True -- copy complete
dispatch RemoveSafe deststartedwithcopy True = lockContentShared key $ \_lck -> do
@@ -224,7 +224,7 @@ fromPerform src removewhen key afile = do
, show src
, "(" ++ reason ++ ")"
]
- ok <- Remote.removeKey src key
+ ok <- Remote.action (Remote.removeKey src key)
next $ Command.Drop.cleanupRemote key src ok
faileddropremote = do
showLongNote "(Use --force to override this check, or adjust numcopies.)"
diff --git a/Command/Reinject.hs b/Command/Reinject.hs
index 292ac39..891bec9 100644
--- a/Command/Reinject.hs
+++ b/Command/Reinject.hs
@@ -55,15 +55,13 @@ startSrcDest _ = giveup "specify a src file and a dest file"
startKnown :: FilePath -> CommandStart
startKnown src = notAnnexed src $
starting "reinject" (ActionItemOther (Just src)) $ do
- mkb <- genKey ks nullMeterUpdate Nothing
- case mkb of
- Nothing -> error "Failed to generate key"
- Just (key, _) -> ifM (isKnownKey key)
- ( perform src key
- , do
- warning "Not known content; skipping"
- next $ return True
- )
+ (key, _) <- genKey ks nullMeterUpdate Nothing
+ ifM (isKnownKey key)
+ ( perform src key
+ , do
+ warning "Not known content; skipping"
+ next $ return True
+ )
where
ks = KeySource src' src' Nothing
src' = toRawFilePath src
diff --git a/Command/Sync.hs b/Command/Sync.hs
index ac19f6a..837b742 100644
--- a/Command/Sync.hs
+++ b/Command/Sync.hs
@@ -346,36 +346,41 @@ mergeLocal mergeconfig o currbranch = stopUnless (notOnlyAnnex o) $
mergeLocal' mergeconfig o currbranch
mergeLocal' :: [Git.Merge.MergeConfig] -> SyncOptions -> CurrBranch -> CommandStart
-mergeLocal' mergeconfig o currbranch@(Just _, _) =
- needMerge currbranch >>= \case
+mergeLocal' mergeconfig o currbranch@(Just branch, _) =
+ needMerge currbranch branch >>= \case
Nothing -> stop
Just syncbranch ->
starting "merge" (ActionItemOther (Just $ Git.Ref.describe syncbranch)) $
next $ merge currbranch mergeconfig o Git.Branch.ManualCommit syncbranch
-mergeLocal' _ _ (Nothing, madj) = do
- b <- inRepo Git.Branch.currentUnsafe
- needMerge (b, madj) >>= \case
+mergeLocal' _ _ currbranch@(Nothing, _) = inRepo Git.Branch.currentUnsafe >>= \case
+ Just branch -> needMerge currbranch branch >>= \case
Nothing -> stop
Just syncbranch ->
starting "merge" (ActionItemOther (Just $ Git.Ref.describe syncbranch)) $ do
- warning $ "There are no commits yet in the currently checked out branch, so cannot merge any remote changes into it."
+ warning $ "There are no commits yet to branch " ++ Git.fromRef branch ++ ", so cannot merge " ++ Git.fromRef syncbranch ++ " into it."
next $ return False
+ Nothing -> stop
-- Returns the branch that should be merged, if any.
-needMerge :: CurrBranch -> Annex (Maybe Git.Branch)
-needMerge (Nothing, _) = return Nothing
-needMerge (Just branch, madj) = ifM (allM id checks)
+needMerge :: CurrBranch -> Git.Branch -> Annex (Maybe Git.Branch)
+needMerge currbranch headbranch = ifM (allM id checks)
( return (Just syncbranch)
, return Nothing
)
where
- checks =
- [ not <$> isBareRepo
- , inRepo (Git.Ref.exists syncbranch)
- , inRepo (Git.Branch.changed branch' syncbranch)
- ]
- syncbranch = syncBranch branch
- branch' = maybe branch (adjBranch . originalToAdjusted branch) madj
+ syncbranch = syncBranch headbranch
+ checks = case currbranch of
+ (Just _, madj) ->
+ let branch' = maybe headbranch (adjBranch . originalToAdjusted headbranch) madj
+ in
+ [ not <$> isBareRepo
+ , inRepo (Git.Ref.exists syncbranch)
+ , inRepo (Git.Branch.changed branch' syncbranch)
+ ]
+ (Nothing, _) ->
+ [ not <$> isBareRepo
+ , inRepo (Git.Ref.exists syncbranch)
+ ]
pushLocal :: SyncOptions -> CurrBranch -> CommandStart
pushLocal o b = stopUnless (notOnlyAnnex o) $ do
diff --git a/Command/TestRemote.hs b/Command/TestRemote.hs
index df4606b..dbef373 100644
--- a/Command/TestRemote.hs
+++ b/Command/TestRemote.hs
@@ -40,6 +40,7 @@ import "crypto-api" Crypto.Random
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import qualified Data.Map as M
+import Data.Either
import Control.Concurrent.STM hiding (check)
cmd :: Command
@@ -213,15 +214,15 @@ mkTestTrees runannex mkrs mkunavailr mkexportr mkks = concat $
test :: RunAnnex -> Annex (Maybe Remote) -> Annex Key -> [TestTree]
test runannex mkr mkk =
[ check "removeKey when not present" $ \r k ->
- whenwritable r $ remove r k
+ whenwritable r $ isRight <$> tryNonAsync (remove r k)
, check ("present " ++ show False) $ \r k ->
whenwritable r $ present r k False
, check "storeKey" $ \r k ->
- whenwritable r $ store r k
+ whenwritable r $ isRight <$> tryNonAsync (store r k)
, check ("present " ++ show True) $ \r k ->
whenwritable r $ present r k True
, check "storeKey when already present" $ \r k ->
- whenwritable r $ store r k
+ whenwritable r $ isRight <$> tryNonAsync (store r k)
, check ("present " ++ show True) $ \r k -> present r k True
, check "retrieveKeyFile" $ \r k -> do
lockContentForRemoval k removeAnnex
@@ -251,7 +252,7 @@ test runannex mkr mkk =
get r k
, check "fsck downloaded object" fsck
, check "removeKey when present" $ \r k ->
- whenwritable r $ remove r k
+ whenwritable r $ isRight <$> tryNonAsync (remove r k)
, check ("present " ++ show False) $ \r k ->
whenwritable r $ present r k False
]
@@ -273,8 +274,9 @@ test runannex mkr mkk =
Nothing -> return True
Just verifier -> verifier k (serializeKey k)
get r k = getViaTmp (Remote.retrievalSecurityPolicy r) (RemoteVerify r) k $ \dest ->
- Remote.retrieveKeyFile r k (AssociatedFile Nothing)
- dest nullMeterUpdate
+ tryNonAsync (Remote.retrieveKeyFile r k (AssociatedFile Nothing) dest nullMeterUpdate) >>= \case
+ Right v -> return (True, v)
+ Left _ -> return (False, UnVerified)
store r k = Remote.storeKey r k (AssociatedFile Nothing) nullMeterUpdate
remove r k = Remote.removeKey r k
@@ -283,31 +285,31 @@ testExportTree runannex mkr mkk1 mkk2 =
[ check "check present export when not present" $ \ea k1 _k2 ->
not <$> checkpresentexport ea k1
, check "remove export when not present" $ \ea k1 _k2 ->
- removeexport ea k1
+ isRight <$> tryNonAsync (removeexport ea k1)
, check "store export" $ \ea k1 _k2 ->
- storeexport ea k1
+ isRight <$> tryNonAsync (storeexport ea k1)
, check "check present export after store" $ \ea k1 _k2 ->
checkpresentexport ea k1
, check "store export when already present" $ \ea k1 _k2 ->
- storeexport ea k1
+ isRight <$> tryNonAsync (storeexport ea k1)
, check "retrieve export" $ \ea k1 _k2 ->
retrieveexport ea k1
, check "store new content to export" $ \ea _k1 k2 ->
- storeexport ea k2
+ isRight <$> tryNonAsync (storeexport ea k2)
, check "check present export after store of new content" $ \ea _k1 k2 ->
checkpresentexport ea k2
, check "retrieve export new content" $ \ea _k1 k2 ->
retrieveexport ea k2
, check "remove export" $ \ea _k1 k2 ->
- removeexport ea k2
+ isRight <$> tryNonAsync (removeexport ea k2)
, check "check present export after remove" $ \ea _k1 k2 ->
not <$> checkpresentexport ea k2
, check "retrieve export fails after removal" $ \ea _k1 k2 ->
not <$> retrieveexport ea k2
, check "remove export directory" $ \ea _k1 _k2 ->
- removeexportdirectory ea
+ isRight <$> tryNonAsync (removeexportdirectory ea)
, check "remove export directory that is already removed" $ \ea _k1 _k2 ->
- removeexportdirectory ea
+ isRight <$> tryNonAsync (removeexportdirectory ea)
-- renames are not tested because remotes do not need to support them
]
where
@@ -327,30 +329,33 @@ testExportTree runannex mkr mkk1 mkk2 =
Remote.storeExport ea loc k testexportlocation nullMeterUpdate
retrieveexport ea k = withTmpFile "exported" $ \tmp h -> do
liftIO $ hClose h
- ifM (Remote.retrieveExport ea k testexportlocation tmp nullMeterUpdate)
- ( verifyKeyContent RetrievalAllKeysSecure AlwaysVerify UnVerified k tmp
- , return False
- )
+ tryNonAsync (Remote.retrieveExport ea k testexportlocation tmp nullMeterUpdate) >>= \case
+ Left _ -> return False
+ Right () -> verifyKeyContent RetrievalAllKeysSecure AlwaysVerify UnVerified k tmp
checkpresentexport ea k = Remote.checkPresentExport ea k testexportlocation
removeexport ea k = Remote.removeExport ea k testexportlocation
removeexportdirectory ea = case Remote.removeExportDirectory ea of
- Nothing -> return True
Just a -> a (mkExportDirectory (toRawFilePath testexportdirectory))
+ Nothing -> noop
testUnavailable :: RunAnnex -> Annex (Maybe Remote) -> Annex Key -> [TestTree]
testUnavailable runannex mkr mkk =
- [ check (== Right False) "removeKey" $ \r k ->
+ [ check isLeft "removeKey" $ \r k ->
Remote.removeKey r k
- , check (== Right False) "storeKey" $ \r k ->
+ , check isLeft "storeKey" $ \r k ->
Remote.storeKey r k (AssociatedFile Nothing) nullMeterUpdate
, check (`notElem` [Right True, Right False]) "checkPresent" $ \r k ->
Remote.checkPresent r k
, check (== Right False) "retrieveKeyFile" $ \r k ->
getViaTmp (Remote.retrievalSecurityPolicy r) (RemoteVerify r) k $ \dest ->
- Remote.retrieveKeyFile r k (AssociatedFile Nothing) dest nullMeterUpdate
- , check (== Right False) "retrieveKeyFileCheap" $ \r k ->
- getViaTmp (Remote.retrievalSecurityPolicy r) (RemoteVerify r) k $ \dest -> unVerified $
- Remote.retrieveKeyFileCheap r k (AssociatedFile Nothing) dest
+ tryNonAsync (Remote.retrieveKeyFile r k (AssociatedFile Nothing) dest nullMeterUpdate) >>= \case
+ Right v -> return (True, v)
+ Left _ -> return (False, UnVerified)
+ , check (== Right False) "retrieveKeyFileCheap" $ \r k -> case Remote.retrieveKeyFileCheap r of
+ Nothing -> return False
+ Just a -> getViaTmp (Remote.retrievalSecurityPolicy r) (RemoteVerify r) k $ \dest ->
+ unVerified $ isRight
+ <$> tryNonAsync (a k (AssociatedFile Nothing) dest)
]
where
check checkval desc a = testCase desc $
@@ -399,7 +404,7 @@ randKey :: Int -> Annex Key
randKey sz = withTmpFile "randkey" $ \f h -> do
gen <- liftIO (newGenIO :: IO SystemRandom)
case genBytes sz gen of
- Left e -> error $ "failed to generate random key: " ++ show e
+ Left e -> giveup $ "failed to generate random key: " ++ show e
Right (rand, _) -> liftIO $ B.hPut h rand
liftIO $ hClose h
let ks = KeySource
@@ -407,8 +412,9 @@ randKey sz = withTmpFile "randkey" $ \f h -> do
, contentLocation = toRawFilePath f
, inodeCache = Nothing
}
- k <- fromMaybe (error "failed to generate random key")
- <$> Backend.getKey Backend.Hash.testKeyBackend ks nullMeterUpdate
+ k <- case Backend.getKey Backend.Hash.testKeyBackend of
+ Just a -> a ks nullMeterUpdate
+ Nothing -> giveup "failed to generate random key (backend problem)"
_ <- moveAnnex k f
return k
diff --git a/Command/TransferKey.hs b/Command/TransferKey.hs
index 8561ef8..01e292c 100644
--- a/Command/TransferKey.hs
+++ b/Command/TransferKey.hs
@@ -52,16 +52,23 @@ start o key = startingCustomOutput key $ case fromToOptions o of
toPerform :: Key -> AssociatedFile -> Remote -> CommandPerform
toPerform key file remote = go Upload file $
upload (uuid remote) key file stdRetry $ \p -> do
- ok <- Remote.storeKey remote key file p
- when ok $
- Remote.logStatus remote key InfoPresent
- return ok
+ tryNonAsync (Remote.storeKey remote key file p) >>= \case
+ Right () -> do
+ Remote.logStatus remote key InfoPresent
+ return True
+ Left e -> do
+ warning (show e)
+ return False
fromPerform :: Key -> AssociatedFile -> Remote -> CommandPerform
fromPerform key file remote = go Upload file $
download (uuid remote) key file stdRetry $ \p ->
- getViaTmp (retrievalSecurityPolicy remote) (RemoteVerify remote) key $
- \t -> Remote.retrieveKeyFile remote key file t p
+ getViaTmp (retrievalSecurityPolicy remote) (RemoteVerify remote) key $ \t ->
+ tryNonAsync (Remote.retrieveKeyFile remote key file t p) >>= \case
+ Right v -> return (True, v)
+ Left e -> do
+ warning (show e)
+ return (False, UnVerified)
go :: Direction -> AssociatedFile -> (NotifyWitness -> Annex Bool) -> CommandPerform
go direction file a = notifyTransfer direction file a >>= liftIO . exitBool
diff --git a/Command/TransferKeys.hs b/Command/TransferKeys.hs
index f0c112a..cc41dc2 100644
--- a/Command/TransferKeys.hs
+++ b/Command/TransferKeys.hs
@@ -38,14 +38,21 @@ start = do
runner (TransferRequest direction remote key file)
| direction == Upload = notifyTransfer direction file $
upload (Remote.uuid remote) key file stdRetry $ \p -> do
- ok <- Remote.storeKey remote key file p
- when ok $
- Remote.logStatus remote key InfoPresent
- return ok
+ tryNonAsync (Remote.storeKey remote key file p) >>= \case
+ Left e -> do
+ warning (show e)
+ return False
+ Right () -> do
+ Remote.logStatus remote key InfoPresent
+ return True
| otherwise = notifyTransfer direction file $
download (Remote.uuid remote) key file stdRetry $ \p ->
getViaTmp (Remote.retrievalSecurityPolicy remote) (RemoteVerify remote) key $ \t -> do
- r <- Remote.retrieveKeyFile remote key file t p
+ r <- tryNonAsync (Remote.retrieveKeyFile remote key file t p) >>= \case
+ Left e -> do
+ warning (show e)
+ return (False, UnVerified)
+ Right v -> return (True, v)
-- Make sure we get the current
-- associated files data for the key,
-- not old cached data.
diff --git a/Command/Upgrade.hs b/Command/Upgrade.hs
index 2b343d9..ffdf368 100644
--- a/Command/Upgrade.hs
+++ b/Command/Upgrade.hs
@@ -13,12 +13,13 @@ import Annex.Version
import Annex.Init
cmd :: Command
-cmd = dontCheck repoExists $
- -- ^ because an old version may not seem to exist
+cmd = dontCheck
+ -- because an old version may not seem to exist
-- and also, this avoids automatic silent upgrades before
-- this command can start up.
+ repoExists $
+ -- avoid upgrading repo out from under daemon
noDaemonRunning $
- -- ^ avoid upgrading repo out from under daemon
command "upgrade" SectionMaintenance "upgrade repository"
paramNothing (seek <$$> optParser)
diff --git a/Command/Whereis.hs b/Command/Whereis.hs
index 8e0ecb9..0b850ef 100644
--- a/Command/Whereis.hs
+++ b/Command/Whereis.hs
@@ -1,10 +1,12 @@
{- git-annex command
-
- - Copyright 2010-2016 Joey Hess <id@joeyh.name>
+ - Copyright 2010-2020 Joey Hess <id@joeyh.name>
-
- Licensed under the GNU AGPL version 3 or higher.
-}
+{-# LANGUAGE TupleSections #-}
+
module Command.Whereis where
import Command
@@ -13,6 +15,9 @@ import Logs.Trust
import Logs.Web
import Remote.Web (getWebUrls)
import Annex.UUID
+import qualified Utility.Format
+import qualified Command.Find
+import Types.ActionItem
import qualified Data.Map as M
import qualified Data.Vector as V
@@ -27,6 +32,7 @@ data WhereisOptions = WhereisOptions
{ whereisFiles :: CmdParams
, keyOptions :: Maybe KeyOptions
, batchOption :: BatchMode
+ , formatOption :: Maybe Utility.Format.Format
}
optParser :: CmdParamsDesc -> Parser WhereisOptions
@@ -34,40 +40,74 @@ optParser desc = WhereisOptions
<$> cmdParams desc
<*> optional parseKeyOptions
<*> parseBatchOption
+ <*> optional parseFormatOption
+
+parseFormatOption :: Parser Utility.Format.Format
+parseFormatOption = option (Utility.Format.gen <$> str)
+ ( long "format" <> metavar paramFormat
+ <> help "control format of output"
+ )
seek :: WhereisOptions -> CommandSeek
seek o = do
m <- remoteMap id
- let go = whenAnnexed $ start m
+ let go = whenAnnexed $ start o m
case batchOption o of
Batch fmt -> batchFilesMatching fmt (go . toRawFilePath)
NoBatch ->
withKeyOptions (keyOptions o) False
- (commandAction . startKeys m)
+ (commandAction . startKeys o m)
(withFilesInGit (commandAction . go))
=<< workTreeItems (whereisFiles o)
-start :: M.Map UUID Remote -> RawFilePath -> Key -> CommandStart
-start remotemap file key = startKeys remotemap (key, mkActionItem (key, afile))
+start :: WhereisOptions -> M.Map UUID Remote -> RawFilePath -> Key -> CommandStart
+start o remotemap file key =
+ startKeys o remotemap (key, mkActionItem (key, afile))
where
afile = AssociatedFile (Just file)
-startKeys :: M.Map UUID Remote -> (Key, ActionItem) -> CommandStart
-startKeys remotemap (key, ai) = starting "whereis" ai $ perform remotemap key
+startKeys :: WhereisOptions -> M.Map UUID Remote -> (Key, ActionItem) -> CommandStart
+startKeys o remotemap (key, ai)
+ | isJust (formatOption o) = startingCustomOutput ai go
+ | otherwise = starting "whereis" ai go
+ where
+ go = perform o remotemap key ai
-perform :: M.Map UUID Remote -> Key -> CommandPerform
-perform remotemap key = do
+perform :: WhereisOptions -> M.Map UUID Remote -> Key -> ActionItem -> CommandPerform
+perform o remotemap key ai = do
locations <- keyLocations key
urls <- getUUIDUrls key locations remotemap
(untrustedlocations, safelocations) <- trustPartition UnTrusted locations
- let num = length safelocations
- showNote $ show num ++ " " ++ copiesplural num
- pp <- ppwhereis "whereis" safelocations urls
- unless (null safelocations) $ showLongNote pp
- pp' <- ppwhereis "untrusted" untrustedlocations urls
- unless (null untrustedlocations) $ showLongNote $ untrustedheader ++ pp'
-
- mapM_ (showRemoteUrls remotemap) urls
+ case formatOption o of
+ Nothing -> do
+ let num = length safelocations
+ showNote $ show num ++ " " ++ copiesplural num
+ pp <- ppwhereis "whereis" safelocations urls
+ unless (null safelocations) $ showLongNote pp
+ pp' <- ppwhereis "untrusted" untrustedlocations urls
+ unless (null untrustedlocations) $ showLongNote $ untrustedheader ++ pp'
+
+ mapM_ (showRemoteUrls remotemap) urls
+ Just formatter -> liftIO $ do
+ let vs = catMaybes
+ [ fmap (("file",) . fromRawFilePath)
+ (actionItemWorkTreeFile ai)
+ ] ++ Command.Find.keyVars key
+ let showformatted muuid murl = putStr $
+ Utility.Format.format formatter $
+ M.fromList $ vs ++ catMaybes
+ [ fmap ("uuid",) muuid
+ , fmap ("url",) murl
+ ]
+ let showformatted' muuid
+ | Utility.Format.formatContainsVar "url" formatter =
+ forM_ (concatMap snd urls) $
+ showformatted muuid . Just
+ | otherwise = showformatted muuid Nothing
+ if Utility.Format.formatContainsVar "uuid" formatter
+ then forM_ locations $
+ showformatted' . Just . fromUUID
+ else showformatted' Nothing
if null safelocations then stop else next $ return True
where
diff --git a/Creds.hs b/Creds.hs
index 766cf70..2852906 100644
--- a/Creds.hs
+++ b/Creds.hs
@@ -31,7 +31,7 @@ import Utility.FileMode
import Crypto
import Types.Remote (RemoteConfig, RemoteConfigField)
import Types.ProposedAccepted
-import Remote.Helper.Encryptable (remoteCipher, remoteCipher', embedCreds, EncryptionIsSetup, extractCipher, parseEncryptionConfig)
+import Remote.Helper.Encryptable (remoteCipher, remoteCipher', embedCreds, EncryptionIsSetup, extractCipher)
import Utility.Env (getEnv)
import qualified Data.ByteString.Lazy.Char8 as L
@@ -56,10 +56,8 @@ data CredPairStorage = CredPairStorage
- if that's going to be done, so that the creds can be encrypted using the
- cipher. The EncryptionIsSetup is witness to that being the case.
-}
-setRemoteCredPair :: EncryptionIsSetup -> RemoteConfig -> RemoteGitConfig -> CredPairStorage -> Maybe CredPair -> Annex RemoteConfig
-setRemoteCredPair = setRemoteCredPair' id go
- where
- go c = either (const (ParsedRemoteConfig mempty c)) id (parseEncryptionConfig c)
+setRemoteCredPair :: EncryptionIsSetup -> ParsedRemoteConfig -> RemoteGitConfig -> CredPairStorage -> Maybe CredPair -> Annex RemoteConfig
+setRemoteCredPair encsetup pc = setRemoteCredPair' id (const pc) encsetup (unparsedRemoteConfig pc)
setRemoteCredPair'
:: (ProposedAccepted String -> a)
@@ -106,7 +104,7 @@ getRemoteCredPair c gc storage = maybe fromcache (return . Just) =<< fromenv
fromconfig = do
let key = credPairRemoteField storage
mcipher <- remoteCipher' c gc
- case (fromProposedAccepted <$> getRemoteConfigValue key c, mcipher) of
+ case (getRemoteConfigValue key c, mcipher) of
(Nothing, _) -> return Nothing
(Just enccreds, Just (cipher, storablecipher)) ->
fromenccreds enccreds cipher storablecipher
diff --git a/Git/Repair.hs b/Git/Repair.hs
index f7a91ca..f81aa78 100644
--- a/Git/Repair.hs
+++ b/Git/Repair.hs
@@ -122,24 +122,26 @@ retrieveMissingObjects missing referencerepo r
)
pullremotes tmpr (rmt:rmts) fetchrefs ms
| not (foundBroken ms) = return ms
- | otherwise = do
- putStrLn $ "Trying to recover missing objects from remote " ++ repoDescribe rmt ++ "."
- ifM (fetchfrom (repoLocation rmt) fetchrefs tmpr)
- ( do
- void $ explodePacks tmpr
- void $ copyObjects tmpr r
- case ms of
- FsckFailed -> pullremotes tmpr rmts fetchrefs ms
- FsckFoundMissing s t -> do
- stillmissing <- findMissing (S.toList s) r
- pullremotes tmpr rmts fetchrefs (FsckFoundMissing stillmissing t)
- , pullremotes tmpr rmts fetchrefs ms
- )
- fetchfrom fetchurl ps fetchr = runBool ps' fetchr'
+ | otherwise = case remoteName rmt of
+ Just n -> do
+ putStrLn $ "Trying to recover missing objects from remote " ++ n ++ "."
+ ifM (fetchfrom n fetchrefs tmpr)
+ ( do
+ void $ explodePacks tmpr
+ void $ copyObjects tmpr r
+ case ms of
+ FsckFailed -> pullremotes tmpr rmts fetchrefs ms
+ FsckFoundMissing s t -> do
+ stillmissing <- findMissing (S.toList s) r
+ pullremotes tmpr rmts fetchrefs (FsckFoundMissing stillmissing t)
+ , pullremotes tmpr rmts fetchrefs ms
+ )
+ Nothing -> pullremotes tmpr rmts fetchrefs ms
+ fetchfrom loc ps fetchr = runBool ps' fetchr'
where
ps' =
[ Param "fetch"
- , Param fetchurl
+ , Param loc
, Param "--force"
, Param "--update-head-ok"
, Param "--quiet"
diff --git a/Remote.hs b/Remote.hs
index a1a07f9..6670925 100644
--- a/Remote.hs
+++ b/Remote.hs
@@ -11,6 +11,8 @@ module Remote (
Remote,
uuid,
name,
+ action,
+ verifiedAction,
storeKey,
retrieveKeyFile,
retrieveKeyFileCheap,
@@ -77,6 +79,21 @@ import Config.DynamicConfig
import Git.Types (RemoteName, ConfigKey(..), fromConfigValue)
import Utility.Aeson
+{- Runs an action that may throw exceptions, catching and displaying them. -}
+action :: Annex () -> Annex Bool
+action a = tryNonAsync a >>= \case
+ Right () -> return True
+ Left e -> do
+ warning (show e)
+ return False
+
+verifiedAction :: Annex Verification -> Annex (Bool, Verification)
+verifiedAction a = tryNonAsync a >>= \case
+ Right v -> return (True, v)
+ Left e -> do
+ warning (show e)
+ return (False, UnVerified)
+
{- Map from UUIDs of Remotes to a calculated value. -}
remoteMap :: (Remote -> v) -> Annex (M.Map UUID v)
remoteMap mkv = remoteMap' mkv (pure . mkk)
diff --git a/Remote/Adb.hs b/Remote/Adb.hs
index e95905b..755dbed 100644
--- a/Remote/Adb.hs
+++ b/Remote/Adb.hs
@@ -64,8 +64,8 @@ gen r u rc gc rs = do
, cost = semiExpensiveRemoteCost
, name = Git.repoDescribe r
, storeKey = storeKeyDummy
- , retrieveKeyFile = retreiveKeyFileDummy
- , retrieveKeyFileCheap = \_ _ _ -> return False
+ , retrieveKeyFile = retrieveKeyFileDummy
+ , retrieveKeyFileCheap = Nothing
, retrievalSecurityPolicy = RetrievalAllKeysSecure
, removeKey = removeKeyDummy
, lockContent = Nothing
@@ -108,10 +108,10 @@ gen r u rc gc rs = do
, remoteStateHandle = rs
}
return $ Just $ specialRemote c
- (simplyPrepare $ store serial adir)
- (simplyPrepare $ retrieve serial adir)
- (simplyPrepare $ remove serial adir)
- (simplyPrepare $ checkKey this serial adir)
+ (store serial adir)
+ (retrieve serial adir)
+ (remove serial adir)
+ (checkKey this serial adir)
this
where
adir = maybe (giveup "missing androiddirectory") AndroidPath
@@ -160,7 +160,8 @@ adbSetup _ mu _ c gc = do
store :: AndroidSerial -> AndroidPath -> Storer
store serial adir = fileStorer $ \k src _p ->
let dest = androidLocation adir k
- in store' serial dest src
+ in unlessM (store' serial dest src) $
+ giveup "adb failed"
store' :: AndroidSerial -> AndroidPath -> FilePath -> Annex Bool
store' serial dest src = store'' serial dest src (return True)
@@ -185,20 +186,25 @@ store'' serial dest src canoverwrite = checkAdbInPath False $ do
retrieve :: AndroidSerial -> AndroidPath -> Retriever
retrieve serial adir = fileRetriever $ \dest k _p ->
let src = androidLocation adir k
- in unlessM (retrieve' serial src dest) $
- giveup "adb pull failed"
+ in retrieve' serial src dest
-retrieve' :: AndroidSerial -> AndroidPath -> FilePath -> Annex Bool
-retrieve' serial src dest = checkAdbInPath False $ do
- showOutput -- make way for adb pull output
- liftIO $ boolSystem "adb" $ mkAdbCommand serial
- [ Param "pull"
- , File $ fromAndroidPath src
- , File dest
- ]
+retrieve' :: AndroidSerial -> AndroidPath -> FilePath -> Annex ()
+retrieve' serial src dest =
+ unlessM go $
+ giveup "adb pull failed"
+ where
+ go = checkAdbInPath False $ do
+ showOutput -- make way for adb pull output
+ liftIO $ boolSystem "adb" $ mkAdbCommand serial
+ [ Param "pull"
+ , File $ fromAndroidPath src
+ , File dest
+ ]
remove :: AndroidSerial -> AndroidPath -> Remover
-remove serial adir k = remove' serial (androidLocation adir k)
+remove serial adir k =
+ unlessM (remove' serial (androidLocation adir k)) $
+ giveup "adb failed"
remove' :: AndroidSerial -> AndroidPath -> Annex Bool
remove' serial aloc = adbShellBool serial
@@ -231,25 +237,31 @@ androidHashDir adir k = AndroidPath $
where
hdir = replace [pathSeparator] "/" (fromRawFilePath (hashDirLower def k))
-storeExportM :: AndroidSerial -> AndroidPath -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex Bool
-storeExportM serial adir src _k loc _p = store' serial dest src
+storeExportM :: AndroidSerial -> AndroidPath -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
+storeExportM serial adir src _k loc _p =
+ unlessM (store' serial dest src) $
+ giveup "adb failed"
where
dest = androidExportLocation adir loc
-retrieveExportM :: AndroidSerial -> AndroidPath -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Bool
+retrieveExportM :: AndroidSerial -> AndroidPath -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex ()
retrieveExportM serial adir _k loc dest _p = retrieve' serial src dest
where
src = androidExportLocation adir loc
-removeExportM :: AndroidSerial -> AndroidPath -> Key -> ExportLocation -> Annex Bool
-removeExportM serial adir _k loc = remove' serial aloc
+removeExportM :: AndroidSerial -> AndroidPath -> Key -> ExportLocation -> Annex ()
+removeExportM serial adir _k loc =
+ unlessM (remove' serial aloc) $
+ giveup "adb failed"
where
aloc = androidExportLocation adir loc
-removeExportDirectoryM :: AndroidSerial -> AndroidPath -> ExportDirectory -> Annex Bool
-removeExportDirectoryM serial abase dir = adbShellBool serial
- [Param "rm", Param "-rf", File (fromAndroidPath adir)]
+removeExportDirectoryM :: AndroidSerial -> AndroidPath -> ExportDirectory -> Annex ()
+removeExportDirectoryM serial abase dir =
+ unlessM go $
+ giveup "adb failed"
where
+ go = adbShellBool serial [Param "rm", Param "-rf", File (fromAndroidPath adir)]
adir = androidExportLocation abase (mkExportLocation (fromExportDirectory dir))
checkPresentExportM :: Remote -> AndroidSerial -> AndroidPath -> Key -> ExportLocation -> Annex Bool
@@ -257,8 +269,11 @@ checkPresentExportM r serial adir _k loc = checkKey' r serial aloc
where
aloc = androidExportLocation adir loc
-renameExportM :: AndroidSerial -> AndroidPath -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe Bool)
-renameExportM serial adir _k old new = Just <$> adbShellBool serial ps
+renameExportM :: AndroidSerial -> AndroidPath -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe ())
+renameExportM serial adir _k old new = do
+ unlessM (adbShellBool serial ps) $
+ giveup "adb failed"
+ return (Just ())
where
oldloc = fromAndroidPath $ androidExportLocation adir old
newloc = fromAndroidPath $ androidExportLocation adir new
@@ -300,33 +315,30 @@ listImportableContentsM serial adir =
-- connection is resonably fast, it's probably as good as
-- git's handling of similar situations with files being modified while
-- it's updating the working tree for a merge.
-retrieveExportWithContentIdentifierM :: AndroidSerial -> AndroidPath -> ExportLocation -> ContentIdentifier -> FilePath -> Annex (Maybe Key) -> MeterUpdate -> Annex (Maybe Key)
-retrieveExportWithContentIdentifierM serial adir loc cid dest mkkey _p = catchDefaultIO Nothing $
- ifM (retrieve' serial src dest)
- ( do
- k <- mkkey
- currcid <- getExportContentIdentifier serial adir loc
- return $ if currcid == Right (Just cid)
- then k
- else Nothing
- , return Nothing
- )
+retrieveExportWithContentIdentifierM :: AndroidSerial -> AndroidPath -> ExportLocation -> ContentIdentifier -> FilePath -> Annex Key -> MeterUpdate -> Annex Key
+retrieveExportWithContentIdentifierM serial adir loc cid dest mkkey _p = do
+ retrieve' serial src dest
+ k <- mkkey
+ currcid <- getExportContentIdentifier serial adir loc
+ if currcid == Right (Just cid)
+ then return k
+ else giveup "the file on the android device has changed"
where
src = androidExportLocation adir loc
-storeExportWithContentIdentifierM :: AndroidSerial -> AndroidPath -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex (Either String ContentIdentifier)
+storeExportWithContentIdentifierM :: AndroidSerial -> AndroidPath -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
storeExportWithContentIdentifierM serial adir src _k loc overwritablecids _p =
-- Check if overwrite is safe before sending, because sending the
-- file is expensive and don't want to do it unncessarily.
ifM checkcanoverwrite
( ifM (store'' serial dest src checkcanoverwrite)
- ( getExportContentIdentifier serial adir loc >>= return . \case
- Right (Just cid) -> Right cid
- Right Nothing -> Left "adb failed to store file"
- Left _ -> Left "unable to get content identifier for file stored on adtb"
- , return $ Left "adb failed to store file"
+ ( getExportContentIdentifier serial adir loc >>= \case
+ Right (Just cid) -> return cid
+ Right Nothing -> giveup "adb failed to store file"
+ Left _ -> giveup "unable to get content identifier for file stored by adb"
+ , giveup "adb failed to store file"
)
- , return $ Left "unsafe to overwrite file"
+ , giveup "unsafe to overwrite file"
)
where
dest = androidExportLocation adir loc
@@ -336,13 +348,15 @@ storeExportWithContentIdentifierM serial adir src _k loc overwritablecids _p =
Right Nothing -> True
_ -> False
-removeExportWithContentIdentifierM :: AndroidSerial -> AndroidPath -> Key -> ExportLocation -> [ContentIdentifier] -> Annex Bool
-removeExportWithContentIdentifierM serial adir k loc removeablecids = catchBoolIO $
+removeExportWithContentIdentifierM :: AndroidSerial -> AndroidPath -> Key -> ExportLocation -> [ContentIdentifier] -> Annex ()
+removeExportWithContentIdentifierM serial adir k loc removeablecids =
getExportContentIdentifier serial adir loc >>= \case
- Right Nothing -> return True
- Right (Just cid) | cid `elem` removeablecids ->
- removeExportM serial adir k loc
- _ -> return False
+ Right Nothing -> return ()
+ Right (Just cid)
+ | cid `elem` removeablecids ->
+ removeExportM serial adir k loc
+ | otherwise -> giveup "file on Android device is modified, cannot remove"
+ Left _ -> giveup "unable to access Android device"
checkPresentExportWithContentIdentifierM :: AndroidSerial -> AndroidPath -> Key -> ExportLocation -> [ContentIdentifier] -> Annex Bool
checkPresentExportWithContentIdentifierM serial adir _k loc knowncids =
diff --git a/Remote/BitTorrent.hs b/Remote/BitTorrent.hs
index 245081c..ab1af9e 100644
--- a/Remote/BitTorrent.hs
+++ b/Remote/BitTorrent.hs
@@ -64,7 +64,7 @@ gen r _ rc gc rs = do
, name = Git.repoDescribe r
, storeKey = uploadKey
, retrieveKeyFile = downloadKey
- , retrieveKeyFileCheap = downloadKeyCheap
+ , retrieveKeyFileCheap = Nothing
-- Bittorrent does its own hash checks.
, retrievalSecurityPolicy = RetrievalAllKeysSecure
, removeKey = dropKey
@@ -91,35 +91,29 @@ gen r _ rc gc rs = do
, remoteStateHandle = rs
}
-downloadKey :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
-downloadKey key _file dest p = unVerified $
+downloadKey :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex Verification
+downloadKey key _file dest p = do
get . map (torrentUrlNum . fst . getDownloader) =<< getBitTorrentUrls key
+ return UnVerified
where
- get [] = do
- warning "could not download torrent"
- return False
+ get [] = giveup "could not download torrent"
get urls = do
showOutput -- make way for download progress bar
- untilTrue urls $ \(u, filenum) -> do
+ ok <- untilTrue urls $ \(u, filenum) -> do
registerTorrentCleanup u
checkDependencies
ifM (downloadTorrentFile u)
( downloadTorrentContent key u dest filenum p
, return False
)
+ unless ok $
+ get []
-downloadKeyCheap :: Key -> AssociatedFile -> FilePath -> Annex Bool
-downloadKeyCheap _ _ _ = return False
+uploadKey :: Key -> AssociatedFile -> MeterUpdate -> Annex ()
+uploadKey _ _ _ = giveup "upload to bittorrent not supported"
-uploadKey :: Key -> AssociatedFile -> MeterUpdate -> Annex Bool
-uploadKey _ _ _ = do
- warning "upload to bittorrent not supported"
- return False
-
-dropKey :: Key -> Annex Bool
-dropKey k = do
- mapM_ (setUrlMissing k) =<< getBitTorrentUrls k
- return True
+dropKey :: Key -> Annex ()
+dropKey k = mapM_ (setUrlMissing k) =<< getBitTorrentUrls k
{- We punt and don't try to check if a torrent has enough seeders
- with all the pieces etc. That would be quite hard.. and even if
@@ -401,8 +395,8 @@ torrentContents :: URLString -> Annex UrlContents
torrentContents u = convert
<$> (liftIO . torrentFileSizes =<< tmpTorrentFile u)
where
- convert [(fn, sz)] = UrlContents (Just sz) (Just (mkSafeFilePath fn))
+ convert [(fn, sz)] = UrlContents (Just sz) (Just fn)
convert l = UrlMulti $ map mkmulti (zip l [1..])
mkmulti ((fn, sz), n) =
- (torrentUrlWithNum u n, Just sz, mkSafeFilePath $ joinPath $ drop 1 $ splitPath fn)
+ (torrentUrlWithNum u n, Just sz, joinPath $ drop 1 $ splitPath fn)
diff --git a/Remote/Bup.hs b/Remote/Bup.hs
index 51c0ebd..fda1527 100644
--- a/Remote/Bup.hs
+++ b/Remote/Bup.hs
@@ -70,8 +70,8 @@ gen r u rc gc rs = do
, cost = cst
, name = Git.repoDescribe r
, storeKey = storeKeyDummy
- , retrieveKeyFile = retreiveKeyFileDummy
- , retrieveKeyFileCheap = retrieveCheap buprepo
+ , retrieveKeyFile = retrieveKeyFileDummy
+ , retrieveKeyFileCheap = Nothing
-- Bup uses git, which cryptographically verifies content
-- (with SHA1, but sufficiently for this).
, retrievalSecurityPolicy = RetrievalAllKeysSecure
@@ -105,10 +105,10 @@ gen r u rc gc rs = do
{ chunkConfig = NoChunks
}
return $ Just $ specialRemote' specialcfg c
- (simplyPrepare $ store this buprepo)
- (simplyPrepare $ retrieve buprepo)
- (simplyPrepare $ remove buprepo)
- (simplyPrepare $ checkKey r bupr')
+ (store this buprepo)
+ (retrieve buprepo)
+ (remove buprepo)
+ (checkKey r bupr')
this
where
buprepo = fromMaybe (giveup "missing buprepo") $ remoteAnnexBupRepo gc
@@ -156,9 +156,7 @@ store r buprepo = byteStorer $ \k b p -> do
showOutput -- make way for bup output
let cmd = proc "bup" (toCommand params)
quiet <- commandProgressDisabled
- let feeder = \h -> do
- meteredWrite p h b
- return True
+ let feeder = \h -> meteredWrite p h b
liftIO $ if quiet
then feedWithQuietOutput createProcessSuccess cmd feeder
else withHandle StdinHandle createProcessSuccess cmd feeder
@@ -171,9 +169,6 @@ retrieve buprepo = byteRetriever $ \k sink -> do
liftIO (hClose h >> forceSuccessProcess p pid)
`after` (sink =<< liftIO (L.hGetContents h))
-retrieveCheap :: BupRepo -> Key -> AssociatedFile -> FilePath -> Annex Bool
-retrieveCheap _ _ _ _ = return False
-
{- Cannot revert having stored a key in bup, but at least the data for the
- key will be used for deltaing data of other keys stored later.
-
@@ -182,8 +177,7 @@ retrieveCheap _ _ _ _ = return False
remove :: BupRepo -> Remover
remove buprepo k = do
go =<< liftIO (bup2GitRemote buprepo)
- warning "content cannot be completely removed from bup remote"
- return True
+ giveup "content cannot be completely removed from bup remote"
where
go r
| Git.repoIsUrl r = void $ onBupRemote r boolSystem "git" params
diff --git a/Remote/Ddar.hs b/Remote/Ddar.hs
index c8847e1..2cf5976 100644
--- a/Remote/Ddar.hs
+++ b/Remote/Ddar.hs
@@ -60,10 +60,10 @@ gen r u rc gc rs = do
{ chunkConfig = NoChunks
}
return $ Just $ specialRemote' specialcfg c
- (simplyPrepare $ store ddarrepo)
- (simplyPrepare $ retrieve ddarrepo)
- (simplyPrepare $ remove ddarrepo)
- (simplyPrepare $ checkKey ddarrepo)
+ (store ddarrepo)
+ (retrieve ddarrepo)
+ (remove ddarrepo)
+ (checkKey ddarrepo)
(this c cst)
where
this c cst = Remote
@@ -71,8 +71,8 @@ gen r u rc gc rs = do
, cost = cst
, name = Git.repoDescribe r
, storeKey = storeKeyDummy
- , retrieveKeyFile = retreiveKeyFileDummy
- , retrieveKeyFileCheap = retrieveCheap
+ , retrieveKeyFile = retrieveKeyFileDummy
+ , retrieveKeyFileCheap = Nothing
-- ddar communicates over ssh, not subject to http redirect
-- type attacks
, retrievalSecurityPolicy = RetrievalAllKeysSecure
@@ -127,7 +127,8 @@ store ddarrepo = fileStorer $ \k src _p -> do
, Param $ ddarRepoLocation ddarrepo
, File src
]
- liftIO $ boolSystem "ddar" params
+ unlessM (liftIO $ boolSystem "ddar" params) $
+ giveup "ddar failed"
{- Convert remote DdarRepo to host and path on remote end -}
splitRemoteDdarRepo :: DdarRepo -> (SshHost, String)
@@ -161,14 +162,12 @@ retrieve ddarrepo = byteRetriever $ \k sink -> do
liftIO (hClose h >> forceSuccessProcess p pid)
`after` (sink =<< liftIO (L.hGetContents h))
-retrieveCheap :: Key -> AssociatedFile -> FilePath -> Annex Bool
-retrieveCheap _ _ _ = return False
-
remove :: DdarRepo -> Remover
remove ddarrepo key = do
(cmd, params) <- ddarRemoteCall NoConsumeStdin ddarrepo 'd'
[Param $ serializeKey key]
- liftIO $ boolSystem cmd params
+ unlessM (liftIO $ boolSystem cmd params) $
+ giveup "ddar failed to remove"
ddarDirectoryExists :: DdarRepo -> Annex (Either String Bool)
ddarDirectoryExists ddarrepo
diff --git a/Remote/Directory.hs b/Remote/Directory.hs
index 60247d1..37218bf 100644
--- a/Remote/Directory.hs
+++ b/Remote/Directory.hs
@@ -60,16 +60,16 @@ gen r u rc gc rs = do
cst <- remoteCost gc cheapRemoteCost
let chunkconfig = getChunkConfig c
return $ Just $ specialRemote c
- (prepareStore dir chunkconfig)
+ (storeKeyM dir chunkconfig)
(retrieveKeyFileM dir chunkconfig)
- (simplyPrepare $ removeKeyM dir)
- (simplyPrepare $ checkPresentM dir chunkconfig)
+ (removeKeyM dir)
+ (checkPresentM dir chunkconfig)
Remote
{ uuid = u
, cost = cst
, name = Git.repoDescribe r
, storeKey = storeKeyDummy
- , retrieveKeyFile = retreiveKeyFileDummy
+ , retrieveKeyFile = retrieveKeyFileDummy
, retrieveKeyFileCheap = retrieveKeyFileCheapM dir chunkconfig
, retrievalSecurityPolicy = RetrievalAllKeysSecure
, removeKey = removeKeyDummy
@@ -154,10 +154,12 @@ storeDir d k = addTrailingPathSeparator $
{- Check if there is enough free disk space in the remote's directory to
- store the key. Note that the unencrypted key size is checked. -}
-prepareStore :: FilePath -> ChunkConfig -> Preparer Storer
-prepareStore d chunkconfig = checkPrepare (checkDiskSpaceDirectory d)
- (byteStorer $ store d chunkconfig)
- where
+storeKeyM :: FilePath -> ChunkConfig -> Storer
+storeKeyM d chunkconfig k c m =
+ ifM (checkDiskSpaceDirectory d k)
+ ( byteStorer (store d chunkconfig) k c m
+ , giveup "Not enough free disk space."
+ )
checkDiskSpaceDirectory :: FilePath -> Key -> Annex Bool
checkDiskSpaceDirectory d k = do
@@ -168,16 +170,16 @@ checkDiskSpaceDirectory d k = do
<*> getFileStatus annexdir
checkDiskSpace (Just d) k 0 samefilesystem
-store :: FilePath -> ChunkConfig -> Key -> L.ByteString -> MeterUpdate -> Annex Bool
+store :: FilePath -> ChunkConfig -> Key -> L.ByteString -> MeterUpdate -> Annex ()
store d chunkconfig k b p = liftIO $ do
void $ tryIO $ createDirectoryUnder d tmpdir
case chunkconfig of
- LegacyChunks chunksize -> Legacy.store d chunksize (finalizeStoreGeneric d) k b p tmpdir destdir
+ LegacyChunks chunksize ->
+ Legacy.store d chunksize (finalizeStoreGeneric d) k b p tmpdir destdir
_ -> do
let tmpf = tmpdir </> kf
meteredWriteFile p tmpf b
finalizeStoreGeneric d tmpdir destdir
- return True
where
tmpdir = addTrailingPathSeparator $ d </> "tmp" </> kf
kf = fromRawFilePath (keyFile k)
@@ -198,26 +200,24 @@ finalizeStoreGeneric d tmp dest = do
mapM_ preventWrite =<< dirContents dest
preventWrite dest
-retrieveKeyFileM :: FilePath -> ChunkConfig -> Preparer Retriever
+retrieveKeyFileM :: FilePath -> ChunkConfig -> Retriever
retrieveKeyFileM d (LegacyChunks _) = Legacy.retrieve locations d
-retrieveKeyFileM d _ = simplyPrepare $ byteRetriever $ \k sink ->
+retrieveKeyFileM d _ = byteRetriever $ \k sink ->
sink =<< liftIO (L.readFile =<< getLocation d k)
-retrieveKeyFileCheapM :: FilePath -> ChunkConfig -> Key -> AssociatedFile -> FilePath -> Annex Bool
+retrieveKeyFileCheapM :: FilePath -> ChunkConfig -> Maybe (Key -> AssociatedFile -> FilePath -> Annex ())
-- no cheap retrieval possible for chunks
-retrieveKeyFileCheapM _ (UnpaddedChunks _) _ _ _ = return False
-retrieveKeyFileCheapM _ (LegacyChunks _) _ _ _ = return False
+retrieveKeyFileCheapM _ (UnpaddedChunks _) = Nothing
+retrieveKeyFileCheapM _ (LegacyChunks _) = Nothing
#ifndef mingw32_HOST_OS
-retrieveKeyFileCheapM d NoChunks k _af f = liftIO $ catchBoolIO $ do
+retrieveKeyFileCheapM d NoChunks = Just $ \k _af f -> liftIO $ do
file <- absPath =<< getLocation d k
ifM (doesFileExist file)
- ( do
- createSymbolicLink file f
- return True
- , return False
+ ( createSymbolicLink file f
+ , giveup "content file not present in remote"
)
#else
-retrieveKeyFileCheapM _ _ _ _ _ = return False
+retrieveKeyFileCheapM _ _ = Nothing
#endif
removeKeyM :: FilePath -> Remover
@@ -226,14 +226,14 @@ removeKeyM d k = liftIO $ removeDirGeneric d (storeDir d k)
{- Removes the directory, which must be located under the topdir.
-
- Succeeds even on directories and contents that do not have write
- - permission.
+ - permission, if it's possible to turn the write bit on.
-
- If the directory does not exist, succeeds as long as the topdir does
- exist. If the topdir does not exist, fails, because in this case the
- remote is not currently accessible and probably still has the content
- we were supposed to remove from it.
-}
-removeDirGeneric :: FilePath -> FilePath -> IO Bool
+removeDirGeneric :: FilePath -> FilePath -> IO ()
removeDirGeneric topdir dir = do
void $ tryIO $ allowWrite dir
#ifdef mingw32_HOST_OS
@@ -241,12 +241,11 @@ removeDirGeneric topdir dir = do
- before it can delete them. -}
void $ tryIO $ mapM_ allowWrite =<< dirContents dir
#endif
- ok <- catchBoolIO $ do
- removeDirectoryRecursive dir
- return True
- if ok
- then return ok
- else doesDirectoryExist topdir <&&> (not <$> doesDirectoryExist dir)
+ tryNonAsync (removeDirectoryRecursive dir) >>= \case
+ Right () -> return ()
+ Left e ->
+ unlessM (doesDirectoryExist topdir <&&> (not <$> doesDirectoryExist dir)) $
+ throwM e
checkPresentM :: FilePath -> ChunkConfig -> CheckPresent
checkPresentM d (LegacyChunks _) k = Legacy.checkKey d locations k
@@ -265,28 +264,25 @@ checkPresentGeneric' d check = ifM check
)
)
-storeExportM :: FilePath -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex Bool
-storeExportM d src _k loc p = liftIO $ catchBoolIO $ do
+storeExportM :: FilePath -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
+storeExportM d src _k loc p = liftIO $ do
createDirectoryUnder d (takeDirectory dest)
-- Write via temp file so that checkPresentGeneric will not
-- see it until it's fully stored.
viaTmp (\tmp () -> withMeteredFile src p (L.writeFile tmp)) dest ()
- return True
where
dest = exportPath d loc
-retrieveExportM :: FilePath -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Bool
-retrieveExportM d _k loc dest p = liftIO $ catchBoolIO $ do
- withMeteredFile src p (L.writeFile dest)
- return True
+retrieveExportM :: FilePath -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex ()
+retrieveExportM d _k loc dest p =
+ liftIO $ withMeteredFile src p (L.writeFile dest)
where
src = exportPath d loc
-removeExportM :: FilePath -> Key -> ExportLocation -> Annex Bool
+removeExportM :: FilePath -> Key -> ExportLocation -> Annex ()
removeExportM d _k loc = liftIO $ do
nukeFile src
removeExportLocation d loc
- return True
where
src = exportPath d loc
@@ -294,15 +290,13 @@ checkPresentExportM :: FilePath -> Key -> ExportLocation -> Annex Bool
checkPresentExportM d _k loc =
checkPresentGeneric d [exportPath d loc]
-renameExportM :: FilePath -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe Bool)
-renameExportM d _k oldloc newloc = liftIO $ Just <$> go
+renameExportM :: FilePath -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe ())
+renameExportM d _k oldloc newloc = liftIO $ do
+ createDirectoryUnder d (takeDirectory dest)
+ renameFile src dest
+ removeExportLocation d oldloc
+ return (Just ())
where
- go = catchBoolIO $ do
- createDirectoryUnder d (takeDirectory dest)
- renameFile src dest
- removeExportLocation d oldloc
- return True
-
src = exportPath d oldloc
dest = exportPath d newloc
@@ -348,9 +342,9 @@ mkContentIdentifier f st =
fmap (ContentIdentifier . encodeBS . showInodeCache)
<$> toInodeCache noTSDelta f st
-retrieveExportWithContentIdentifierM :: FilePath -> ExportLocation -> ContentIdentifier -> FilePath -> Annex (Maybe Key) -> MeterUpdate -> Annex (Maybe Key)
+retrieveExportWithContentIdentifierM :: FilePath -> ExportLocation -> ContentIdentifier -> FilePath -> Annex Key -> MeterUpdate -> Annex Key
retrieveExportWithContentIdentifierM dir loc cid dest mkkey p =
- catchDefaultIO Nothing $ precheck $ docopy postcheck
+ precheck $ docopy postcheck
where
f = exportPath dir loc
@@ -406,39 +400,38 @@ retrieveExportWithContentIdentifierM dir loc cid dest mkkey p =
comparecid cont currcid
| currcid == Just cid = cont
- | otherwise = return Nothing
-
-storeExportWithContentIdentifierM :: FilePath -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex (Either String ContentIdentifier)
-storeExportWithContentIdentifierM dir src _k loc overwritablecids p =
- catchIO go (return . Left . show)
+ | otherwise = giveup "file content has changed"
+
+storeExportWithContentIdentifierM :: FilePath -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
+storeExportWithContentIdentifierM dir src _k loc overwritablecids p = do
+ liftIO $ createDirectoryUnder dir destdir
+ withTmpFileIn destdir template $ \tmpf tmph -> do
+ liftIO $ withMeteredFile src p (L.hPut tmph)
+ liftIO $ hFlush tmph
+ liftIO $ hClose tmph
+ liftIO (getFileStatus tmpf) >>= liftIO . mkContentIdentifier tmpf >>= \case
+ Nothing -> giveup "unable to generate content identifier"
+ Just newcid -> do
+ checkExportContent dir loc
+ (newcid:overwritablecids)
+ (giveup "unsafe to overwrite file")
+ (const $ liftIO $ rename tmpf dest)
+ return newcid
where
- go = do
- liftIO $ createDirectoryUnder dir destdir
- withTmpFileIn destdir template $ \tmpf tmph -> do
- liftIO $ withMeteredFile src p (L.hPut tmph)
- liftIO $ hFlush tmph
- liftIO $ hClose tmph
- liftIO (getFileStatus tmpf) >>= liftIO . mkContentIdentifier tmpf >>= \case
- Nothing ->
- return $ Left "unable to generate content identifier"
- Just newcid ->
- checkExportContent dir loc (newcid:overwritablecids) (Left "unsafe to overwrite file") $ const $ do
- liftIO $ rename tmpf dest
- return (Right newcid)
dest = exportPath dir loc
(destdir, base) = splitFileName dest
template = relatedTemplate (base ++ ".tmp")
-removeExportWithContentIdentifierM :: FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> Annex Bool
+removeExportWithContentIdentifierM :: FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> Annex ()
removeExportWithContentIdentifierM dir k loc removeablecids =
- checkExportContent dir loc removeablecids False $ \case
- DoesNotExist -> return True
+ checkExportContent dir loc removeablecids (giveup "unsafe to remove modified file") $ \case
+ DoesNotExist -> return ()
KnownContentIdentifier -> removeExportM dir k loc
checkPresentExportWithContentIdentifierM :: FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> Annex Bool
checkPresentExportWithContentIdentifierM dir _k loc knowncids =
checkPresentGeneric' dir $
- checkExportContent dir loc knowncids False $ \case
+ checkExportContent dir loc knowncids (return False) $ \case
DoesNotExist -> return False
KnownContentIdentifier -> return True
@@ -459,18 +452,18 @@ data CheckResult = DoesNotExist | KnownContentIdentifier
--
-- So, it suffices to check if the destination file's current
-- content is known, and immediately run the callback.
-checkExportContent :: FilePath -> ExportLocation -> [ContentIdentifier] -> a -> (CheckResult -> Annex a) -> Annex a
+checkExportContent :: FilePath -> ExportLocation -> [ContentIdentifier] -> Annex a -> (CheckResult -> Annex a) -> Annex a
checkExportContent dir loc knowncids unsafe callback =
tryWhenExists (liftIO $ getFileStatus dest) >>= \case
Just destst
- | not (isRegularFile destst) -> return unsafe
+ | not (isRegularFile destst) -> unsafe
| otherwise -> catchDefaultIO Nothing (liftIO $ mkContentIdentifier dest destst) >>= \case
Just destcid
| destcid `elem` knowncids -> callback KnownContentIdentifier
-- dest exists with other content
- | otherwise -> return unsafe
+ | otherwise -> unsafe
-- should never happen
- Nothing -> return unsafe
+ Nothing -> unsafe
-- dest does not exist
Nothing -> callback DoesNotExist
where
diff --git a/Remote/Directory/LegacyChunked.hs b/Remote/Directory/LegacyChunked.hs
index 0943f63..5719ebe 100644
--- a/Remote/Directory/LegacyChunked.hs
+++ b/Remote/Directory/LegacyChunked.hs
@@ -70,7 +70,7 @@ storeLegacyChunked' meterupdate chunksize (d:dests) bs c = do
feed bytes' (sz - s) ls h
else return (l:ls)
-storeHelper :: FilePath -> (FilePath -> FilePath -> IO ()) -> Key -> ([FilePath] -> IO [FilePath]) -> FilePath -> FilePath -> IO Bool
+storeHelper :: FilePath -> (FilePath -> FilePath -> IO ()) -> Key -> ([FilePath] -> IO [FilePath]) -> FilePath -> FilePath -> IO ()
storeHelper repotop finalizer key storer tmpdir destdir = do
void $ liftIO $ tryIO $ createDirectoryUnder repotop tmpdir
Legacy.storeChunks key tmpdir destdir storer recorder finalizer
@@ -80,7 +80,7 @@ storeHelper repotop finalizer key storer tmpdir destdir = do
writeFile f s
void $ tryIO $ preventWrite f
-store :: FilePath -> ChunkSize -> (FilePath -> FilePath -> IO ()) -> Key -> L.ByteString -> MeterUpdate -> FilePath -> FilePath -> IO Bool
+store :: FilePath -> ChunkSize -> (FilePath -> FilePath -> IO ()) -> Key -> L.ByteString -> MeterUpdate -> FilePath -> FilePath -> IO ()
store repotop chunksize finalizer k b p = storeHelper repotop finalizer k $ \dests ->
storeLegacyChunked p chunksize dests b
@@ -88,11 +88,11 @@ store repotop chunksize finalizer k b p = storeHelper repotop finalizer k $ \des
 - Done very inefficiently, by writing to a temp file.
- :/ This is legacy code..
-}
-retrieve :: (FilePath -> Key -> [FilePath]) -> FilePath -> Preparer Retriever
-retrieve locations d basek a = withOtherTmp $ \tmpdir -> do
+retrieve :: (FilePath -> Key -> [FilePath]) -> FilePath -> Retriever
+retrieve locations d basek p c = withOtherTmp $ \tmpdir -> do
showLongNote "This remote uses the deprecated chunksize setting. So this will be quite slow."
let tmp = tmpdir </> fromRawFilePath (keyFile basek) ++ ".directorylegacy.tmp"
- a $ Just $ byteRetriever $ \k sink -> do
+ let go = \k sink -> do
liftIO $ void $ withStoredFiles d locations k $ \fs -> do
forM_ fs $
S.appendFile tmp <=< S.readFile
@@ -100,6 +100,7 @@ retrieve locations d basek a = withOtherTmp $ \tmpdir -> do
b <- liftIO $ L.readFile tmp
liftIO $ nukeFile tmp
sink b
+ byteRetriever go basek p c
checkKey :: FilePath -> (FilePath -> Key -> [FilePath]) -> Key -> Annex Bool
checkKey d locations k = liftIO $ withStoredFiles d locations k $
diff --git a/Remote/External.hs b/Remote/External.hs
index 72f1a2c..7c3ae1e 100644
--- a/Remote/External.hs
+++ b/Remote/External.hs
@@ -121,8 +121,8 @@ gen r u rc gc rs
, cost = cst
, name = Git.repoDescribe r
, storeKey = storeKeyDummy
- , retrieveKeyFile = retreiveKeyFileDummy
- , retrieveKeyFileCheap = \_ _ _ -> return False
+ , retrieveKeyFile = retrieveKeyFileDummy
+ , retrieveKeyFileCheap = Nothing
-- External special remotes use many http libraries
-- and have no protection against redirects to
-- local private web servers, or in some cases
@@ -154,10 +154,10 @@ gen r u rc gc rs
, remoteStateHandle = rs
}
return $ Just $ specialRemote c
- (simplyPrepare tostore)
- (simplyPrepare toretrieve)
- (simplyPrepare toremove)
- (simplyPrepare tocheckkey)
+ tostore
+ toretrieve
+ toremove
+ tocheckkey
rmt
externaltype = fromMaybe (giveup "missing externaltype") (remoteAnnexExternalType gc)
@@ -223,36 +223,39 @@ checkExportSupported' external = go `catchNonAsync` (const (return False))
storeKeyM :: External -> Storer
storeKeyM external = fileStorer $ \k f p ->
- handleRequestKey external (\sk -> TRANSFER Upload sk f) k (Just p) $ \resp ->
+ either giveup return =<< go k f p
+ where
+ go k f p = handleRequestKey external (\sk -> TRANSFER Upload sk f) k (Just p) $ \resp ->
case resp of
- TRANSFER_SUCCESS Upload k' | k == k' -> result True
+ TRANSFER_SUCCESS Upload k' | k == k' ->
+ result (Right ())
TRANSFER_FAILURE Upload k' errmsg | k == k' ->
- Just $ do
- warning $ respErrorMessage "TRANSFER" errmsg
- return (Result False)
+ result (Left (respErrorMessage "TRANSFER" errmsg))
_ -> Nothing
retrieveKeyFileM :: External -> Retriever
-retrieveKeyFileM external = fileRetriever $ \d k p ->
- handleRequestKey external (\sk -> TRANSFER Download sk d) k (Just p) $ \resp ->
+retrieveKeyFileM external = fileRetriever $ \d k p ->
+ either giveup return =<< go d k p
+ where
+ go d k p = handleRequestKey external (\sk -> TRANSFER Download sk d) k (Just p) $ \resp ->
case resp of
TRANSFER_SUCCESS Download k'
- | k == k' -> result ()
+ | k == k' -> result $ Right ()
TRANSFER_FAILURE Download k' errmsg
- | k == k' -> Just $ giveup $
+ | k == k' -> result $ Left $
respErrorMessage "TRANSFER" errmsg
_ -> Nothing
removeKeyM :: External -> Remover
-removeKeyM external k = safely $
- handleRequestKey external REMOVE k Nothing $ \resp ->
+removeKeyM external k = either giveup return =<< go
+ where
+ go = handleRequestKey external REMOVE k Nothing $ \resp ->
case resp of
REMOVE_SUCCESS k'
- | k == k' -> result True
+ | k == k' -> result $ Right ()
REMOVE_FAILURE k' errmsg
- | k == k' -> Just $ do
- warning $ respErrorMessage "REMOVE" errmsg
- return (Result False)
+ | k == k' -> result $ Left $
+ respErrorMessage "REMOVE" errmsg
_ -> Nothing
checkPresentM :: External -> CheckPresent
@@ -276,35 +279,29 @@ whereisKeyM external k = handleRequestKey external WHEREIS k Nothing $ \resp ->
UNSUPPORTED_REQUEST -> result []
_ -> Nothing
-storeExportM :: External -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex Bool
-storeExportM external f k loc p = safely $
- handleRequestExport external loc req k (Just p) $ \resp -> case resp of
- TRANSFER_SUCCESS Upload k' | k == k' -> result True
+storeExportM :: External -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
+storeExportM external f k loc p = either giveup return =<< go
+ where
+ go = handleRequestExport external loc req k (Just p) $ \resp -> case resp of
+ TRANSFER_SUCCESS Upload k' | k == k' -> result $ Right ()
TRANSFER_FAILURE Upload k' errmsg | k == k' ->
- Just $ do
- warning $ respErrorMessage "TRANSFER" errmsg
- return (Result False)
- UNSUPPORTED_REQUEST -> Just $ do
- warning "TRANSFEREXPORT not implemented by external special remote"
- return (Result False)
+ result $ Left $ respErrorMessage "TRANSFER" errmsg
+ UNSUPPORTED_REQUEST ->
+ result $ Left "TRANSFEREXPORT not implemented by external special remote"
_ -> Nothing
- where
req sk = TRANSFEREXPORT Upload sk f
-retrieveExportM :: External -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Bool
-retrieveExportM external k loc d p = safely $
- handleRequestExport external loc req k (Just p) $ \resp -> case resp of
+retrieveExportM :: External -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex ()
+retrieveExportM external k loc d p = either giveup return =<< go
+ where
+ go = handleRequestExport external loc req k (Just p) $ \resp -> case resp of
TRANSFER_SUCCESS Download k'
- | k == k' -> result True
+ | k == k' -> result $ Right ()
TRANSFER_FAILURE Download k' errmsg
- | k == k' -> Just $ do
- warning $ respErrorMessage "TRANSFER" errmsg
- return (Result False)
- UNSUPPORTED_REQUEST -> Just $ do
- warning "TRANSFEREXPORT not implemented by external special remote"
- return (Result False)
+ | k == k' -> result $ Left $ respErrorMessage "TRANSFER" errmsg
+ UNSUPPORTED_REQUEST ->
+ result $ Left "TRANSFEREXPORT not implemented by external special remote"
_ -> Nothing
- where
req sk = TRANSFEREXPORT Download sk d
checkPresentExportM :: External -> Key -> ExportLocation -> Annex Bool
@@ -322,53 +319,41 @@ checkPresentExportM external k loc = either giveup id <$> go
Left "CHECKPRESENTEXPORT not implemented by external special remote"
_ -> Nothing
-removeExportM :: External -> Key -> ExportLocation -> Annex Bool
-removeExportM external k loc = safely $
- handleRequestExport external loc REMOVEEXPORT k Nothing $ \resp -> case resp of
+removeExportM :: External -> Key -> ExportLocation -> Annex ()
+removeExportM external k loc = either giveup return =<< go
+ where
+ go = handleRequestExport external loc REMOVEEXPORT k Nothing $ \resp -> case resp of
REMOVE_SUCCESS k'
- | k == k' -> result True
+ | k == k' -> result $ Right ()
REMOVE_FAILURE k' errmsg
- | k == k' -> Just $ do
- warning $ respErrorMessage "REMOVE" errmsg
- return (Result False)
- UNSUPPORTED_REQUEST -> Just $ do
- warning "REMOVEEXPORT not implemented by external special remote"
- return (Result False)
+ | k == k' -> result $ Left $ respErrorMessage "REMOVE" errmsg
+ UNSUPPORTED_REQUEST -> result $
+ Left $ "REMOVEEXPORT not implemented by external special remote"
_ -> Nothing
-removeExportDirectoryM :: External -> ExportDirectory -> Annex Bool
-removeExportDirectoryM external dir = safely $
- handleRequest external req Nothing $ \resp -> case resp of
- REMOVEEXPORTDIRECTORY_SUCCESS -> result True
- REMOVEEXPORTDIRECTORY_FAILURE -> result False
- UNSUPPORTED_REQUEST -> result True
- _ -> Nothing
+removeExportDirectoryM :: External -> ExportDirectory -> Annex ()
+removeExportDirectoryM external dir = either giveup return =<< go
where
+ go = handleRequest external req Nothing $ \resp -> case resp of
+ REMOVEEXPORTDIRECTORY_SUCCESS -> result $ Right ()
+ REMOVEEXPORTDIRECTORY_FAILURE -> result $
+ Left "failed to remove directory"
+ UNSUPPORTED_REQUEST -> result $ Right ()
+ _ -> Nothing
req = REMOVEEXPORTDIRECTORY dir
-renameExportM :: External -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe Bool)
-renameExportM external k src dest = safely' (Just False) $
- handleRequestExport external src req k Nothing $ \resp -> case resp of
+renameExportM :: External -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe ())
+renameExportM external k src dest = either giveup return =<< go
+ where
+ go = handleRequestExport external src req k Nothing $ \resp -> case resp of
RENAMEEXPORT_SUCCESS k'
- | k' == k -> result (Just True)
+ | k' == k -> result $ Right (Just ())
RENAMEEXPORT_FAILURE k'
- | k' == k -> result (Just False)
- UNSUPPORTED_REQUEST -> result Nothing
+ | k' == k -> result $ Left "failed to rename exported file"
+ UNSUPPORTED_REQUEST -> result (Right Nothing)
_ -> Nothing
- where
req sk = RENAMEEXPORT sk dest
-safely :: Annex Bool -> Annex Bool
-safely = safely' False
-
-safely' :: a -> Annex a -> Annex a
-safely' onerr a = go =<< tryNonAsync a
- where
- go (Right r) = return r
- go (Left e) = do
- toplevelWarning False (show e)
- return onerr
-
{- Sends a Request to the external remote, and waits for it to generate
- a Response. That is fed into the responsehandler, which should return
- the action to run for it (or Nothing if there's a protocol error).
@@ -627,7 +612,7 @@ startExternal external = do
}
p <- propgit g basep
(Just hin, Just hout, Just herr, ph) <-
- createProcess p `catchIO` runerr cmdpath
+ createProcess p `catchNonAsync` runerr cmdpath
stderrelay <- async $ errrelayer herr
cv <- newTVarIO $ externalDefaultConfig external
ccv <- newTVarIO id
@@ -769,14 +754,14 @@ checkUrlM :: External -> URLString -> Annex UrlContents
checkUrlM external url =
handleRequest external (CHECKURL url) Nothing $ \req -> case req of
CHECKURL_CONTENTS sz f -> result $ UrlContents sz $
- if null f then Nothing else Just $ mkSafeFilePath f
+ if null f then Nothing else Just f
CHECKURL_MULTI l -> result $ UrlMulti $ map mkmulti l
CHECKURL_FAILURE errmsg -> Just $ giveup $
respErrorMessage "CHECKURL" errmsg
UNSUPPORTED_REQUEST -> giveup "CHECKURL not implemented by external special remote"
_ -> Nothing
where
- mkmulti (u, s, f) = (u, s, mkSafeFilePath f)
+ mkmulti (u, s, f) = (u, s, f)
retrieveUrl :: Retriever
retrieveUrl = fileRetriever $ \f k p -> do
diff --git a/Remote/GCrypt.hs b/Remote/GCrypt.hs
index 13c6db9..997a3ec 100644
--- a/Remote/GCrypt.hs
+++ b/Remote/GCrypt.hs
@@ -132,8 +132,8 @@ gen' r u c gc rs = do
, cost = cst
, name = Git.repoDescribe r
, storeKey = storeKeyDummy
- , retrieveKeyFile = retreiveKeyFileDummy
- , retrieveKeyFileCheap = \_ _ _ -> return False
+ , retrieveKeyFile = retrieveKeyFileDummy
+ , retrieveKeyFileCheap = Nothing
, retrievalSecurityPolicy = RetrievalAllKeysSecure
, removeKey = removeKeyDummy
, lockContent = Nothing
@@ -159,10 +159,10 @@ gen' r u c gc rs = do
, remoteStateHandle = rs
}
return $ Just $ specialRemote' specialcfg c
- (simplyPrepare $ store this rsyncopts)
- (simplyPrepare $ retrieve this rsyncopts)
- (simplyPrepare $ remove this rsyncopts)
- (simplyPrepare $ checkKey this rsyncopts)
+ (store this rsyncopts)
+ (retrieve this rsyncopts)
+ (remove this rsyncopts)
+ (checkKey this rsyncopts)
this
where
specialcfg
@@ -367,20 +367,21 @@ store r rsyncopts k s p = do
store' :: Git.Repo -> Remote -> Remote.Rsync.RsyncOpts -> Storer
store' repo r rsyncopts
| not $ Git.repoIsUrl repo =
- byteStorer $ \k b p -> guardUsable repo (return False) $ liftIO $ do
+ byteStorer $ \k b p -> guardUsable repo (giveup "cannot access remote") $ liftIO $ do
let tmpdir = Git.repoLocation repo </> "tmp" </> fromRawFilePath (keyFile k)
void $ tryIO $ createDirectoryUnder (Git.repoLocation repo) tmpdir
let tmpf = tmpdir </> fromRawFilePath (keyFile k)
meteredWriteFile p tmpf b
let destdir = parentDir $ gCryptLocation repo k
Remote.Directory.finalizeStoreGeneric (Git.repoLocation repo) tmpdir destdir
- return True
| Git.repoIsSsh repo = if accessShell r
then fileStorer $ \k f p -> do
oh <- mkOutputHandler
- Ssh.rsyncHelper oh (Just p)
+ ok <- Ssh.rsyncHelper oh (Just p)
=<< Ssh.rsyncParamsRemote False r Upload k f
(AssociatedFile Nothing)
+ unless ok $
+ giveup "rsync failed"
else fileStorer $ Remote.Rsync.store rsyncopts
| otherwise = unsupportedUrl
@@ -392,7 +393,7 @@ retrieve r rsyncopts k p sink = do
retrieve' :: Git.Repo -> Remote -> Remote.Rsync.RsyncOpts -> Retriever
retrieve' repo r rsyncopts
| not $ Git.repoIsUrl repo = byteRetriever $ \k sink ->
- guardUsable repo (return False) $
+ guardUsable repo (giveup "cannot access remote") $
sink =<< liftIO (L.readFile $ gCryptLocation repo k)
| Git.repoIsSsh repo = if accessShell r
then fileRetriever $ \f k p -> do
@@ -412,7 +413,7 @@ remove r rsyncopts k = do
remove' :: Git.Repo -> Remote -> Remote.Rsync.RsyncOpts -> Remover
remove' repo r rsyncopts k
- | not $ Git.repoIsUrl repo = guardUsable repo (return False) $
+ | not $ Git.repoIsUrl repo = guardUsable repo (giveup "cannot access remote") $
liftIO $ Remote.Directory.removeDirGeneric (Git.repoLocation repo) (parentDir (gCryptLocation repo k))
| Git.repoIsSsh repo = shellOrRsync r removeshell removersync
| otherwise = unsupportedUrl
diff --git a/Remote/Git.hs b/Remote/Git.hs
index d0c6b3c..532b640 100644
--- a/Remote/Git.hs
+++ b/Remote/Git.hs
@@ -149,15 +149,15 @@ gitSetup (Enable _) Nothing _ _ _ = error "unable to enable git remote with no s
configRead :: Bool -> Git.Repo -> Annex Git.Repo
configRead autoinit r = do
gc <- Annex.getRemoteGitConfig r
- u <- getRepoUUID r
+ hasuuid <- (/= NoUUID) <$> getRepoUUID r
annexignore <- liftIO $ getDynamicConfig (remoteAnnexIgnore gc)
- case (repoCheap r, annexignore, u) of
+ case (repoCheap r, annexignore, hasuuid) of
(_, True, _) -> return r
(True, _, _)
- | remoteAnnexCheckUUID gc -> tryGitConfigRead autoinit r
+ | remoteAnnexCheckUUID gc -> tryGitConfigRead autoinit r hasuuid
| otherwise -> return r
- (False, _, NoUUID) -> configSpecialGitRemotes r >>= \case
- Nothing -> tryGitConfigRead autoinit r
+ (False, _, False) -> configSpecialGitRemotes r >>= \case
+ Nothing -> tryGitConfigRead autoinit r False
Just r' -> return r'
_ -> return r
@@ -183,7 +183,7 @@ gen r u rc gc rs
, name = Git.repoDescribe r
, storeKey = copyToRemote new st
, retrieveKeyFile = copyFromRemote new st
- , retrieveKeyFileCheap = copyFromRemoteCheap new st
+ , retrieveKeyFileCheap = copyFromRemoteCheap new st r
, retrievalSecurityPolicy = RetrievalAllKeysSecure
, removeKey = dropKey new st
, lockContent = Just (lockKey new st)
@@ -244,8 +244,8 @@ repoAvail r
{- Tries to read the config for a specified remote, updates state, and
- returns the updated repo. -}
-tryGitConfigRead :: Bool -> Git.Repo -> Annex Git.Repo
-tryGitConfigRead autoinit r
+tryGitConfigRead :: Bool -> Git.Repo -> Bool -> Annex Git.Repo
+tryGitConfigRead autoinit r hasuuid
| haveconfig r = return r -- already read
| Git.repoIsSsh r = storeUpdatedRemote $ do
v <- Ssh.onRemote NoConsumeStdin r
@@ -258,9 +258,12 @@ tryGitConfigRead autoinit r
Left _ -> configlist_failed
| Git.repoIsHttp r = storeUpdatedRemote geturlconfig
| Git.GCrypt.isEncrypted r = handlegcrypt =<< getConfigMaybe (remoteAnnexConfig r "uuid")
- | Git.repoIsUrl r = return r
- | otherwise = storeUpdatedRemote $ liftIO $
- readlocalannexconfig `catchNonAsync` (const $ return r)
+ | Git.repoIsUrl r = do
+ set_ignore "uses a protocol not supported by git-annex" False
+ return r
+ | otherwise = storeUpdatedRemote $
+ liftIO readlocalannexconfig
+ `catchNonAsync` const failedreadlocalconfig
where
haveconfig = not . M.null . Git.config
@@ -339,6 +342,13 @@ tryGitConfigRead autoinit r
s <- Annex.new r
Annex.eval s $ check `finally` stopCoProcesses
+ failedreadlocalconfig = do
+ unless hasuuid $ case Git.remoteName r of
+ Nothing -> noop
+ Just n -> do
+ warning $ "Remote " ++ n ++ " cannot currently be accessed."
+ return r
+
configlistfields = if autoinit
then [(Fields.autoInit, "1")]
else []
@@ -413,31 +423,26 @@ keyUrls gc repo r key = map tourl locs'
#endif
remoteconfig = gitconfig r
-dropKey :: Remote -> State -> Key -> Annex Bool
+dropKey :: Remote -> State -> Key -> Annex ()
dropKey r st key = do
repo <- getRepo r
- catchNonAsync
- (dropKey' repo r st key)
- (\e -> warning (show e) >> return False)
+ dropKey' repo r st key
-dropKey' :: Git.Repo -> Remote -> State -> Key -> Annex Bool
+dropKey' :: Git.Repo -> Remote -> State -> Key -> Annex ()
dropKey' repo r st@(State connpool duc _ _ _) key
| not $ Git.repoIsUrl repo = ifM duc
- ( guardUsable repo (return False) $
+ ( guardUsable repo (giveup "cannot access remote") $
commitOnCleanup repo r st $ onLocalFast st $ do
whenM (Annex.Content.inAnnex key) $ do
Annex.Content.lockContentForRemoval key $ \lock -> do
Annex.Content.removeAnnex lock
logStatus key InfoMissing
Annex.Content.saveState True
- return True
- , return False
+ , giveup "remote does not have expected annex.uuid value"
)
- | Git.repoIsHttp repo = do
- warning "dropping from http remote not supported"
- return False
+ | Git.repoIsHttp repo = giveup "dropping from http remote not supported"
| otherwise = commitOnCleanup repo r st $ do
- let fallback = Ssh.dropKey repo key
+ let fallback = Ssh.dropKey' repo key
P2PHelper.remove (Ssh.runProto r connpool (return False) fallback) key
lockKey :: Remote -> State -> Key -> (VerifiedCopy -> Annex r) -> Annex r
@@ -505,50 +510,55 @@ lockKey' repo r st@(State connpool duc _ _ _) key callback
failedlock = giveup "can't lock content"
{- Tries to copy a key's content from a remote's annex to a file. -}
-copyFromRemote :: Remote -> State -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
+copyFromRemote :: Remote -> State -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex Verification
copyFromRemote = copyFromRemote' False
-copyFromRemote' :: Bool -> Remote -> State -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
+copyFromRemote' :: Bool -> Remote -> State -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex Verification
copyFromRemote' forcersync r st key file dest meterupdate = do
repo <- getRepo r
copyFromRemote'' repo forcersync r st key file dest meterupdate
-copyFromRemote'' :: Git.Repo -> Bool -> Remote -> State -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
+copyFromRemote'' :: Git.Repo -> Bool -> Remote -> State -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex Verification
copyFromRemote'' repo forcersync r st@(State connpool _ _ _ _) key file dest meterupdate
- | Git.repoIsHttp repo = unVerified $ do
+ | Git.repoIsHttp repo = do
gc <- Annex.getGitConfig
- Url.withUrlOptionsPromptingCreds $
+ ok <- Url.withUrlOptionsPromptingCreds $
Annex.Content.downloadUrl key meterupdate (keyUrls gc repo r key) dest
- | not $ Git.repoIsUrl repo = guardUsable repo (unVerified (return False)) $ do
+ unless ok $
+ giveup "failed to download content"
+ return UnVerified
+ | not $ Git.repoIsUrl repo = guardUsable repo (giveup "cannot access remote") $ do
params <- Ssh.rsyncParams r Download
u <- getUUID
hardlink <- wantHardLink
-- run copy from perspective of remote
- onLocalFast st $ do
- v <- Annex.Content.prepSendAnnex key
- case v of
- Nothing -> do
- warning "content is not present in remote"
- return (False, UnVerified)
- Just (object, checksuccess) -> do
- copier <- mkCopier hardlink st params
- runTransfer (Transfer Download u (fromKey id key))
- file stdRetry $ \p ->
- metered (Just (combineMeterUpdate p meterupdate)) key $ \_ p' ->
- copier object dest p' checksuccess
+ onLocalFast st $ Annex.Content.prepSendAnnex key >>= \case
+ Just (object, checksuccess) -> do
+ copier <- mkCopier hardlink st params
+ (ok, v) <- runTransfer (Transfer Download u (fromKey id key))
+ file stdRetry $ \p ->
+ metered (Just (combineMeterUpdate p meterupdate)) key $ \_ p' ->
+ copier object dest p' checksuccess
+ if ok
+ then return v
+ else giveup "failed to retrieve content from remote"
+ Nothing -> giveup "content is not present in remote"
| Git.repoIsSsh repo = if forcersync
- then fallback meterupdate
+ then do
+ (ok, v) <- fallback meterupdate
+ if ok
+ then return v
+ else giveup "failed to retrieve content from remote"
else P2PHelper.retrieve
(\p -> Ssh.runProto r connpool (return (False, UnVerified)) (fallback p))
key file dest meterupdate
- | otherwise = do
- warning "copying from non-ssh, non-http remote not supported"
- unVerified (return False)
+ | otherwise = giveup "copying from non-ssh, non-http remote not supported"
where
fallback p = unVerified $ feedprogressback $ \p' -> do
oh <- mkOutputHandlerQuiet
Ssh.rsyncHelper oh (Just (combineMeterUpdate p' p))
=<< Ssh.rsyncParamsRemote False r Download key dest file
+
{- Feed local rsync's progress info back to the remote,
- by forking a feeder thread that runs
- git-annex-shell transferinfo at the same time
@@ -609,58 +619,49 @@ copyFromRemote'' repo forcersync r st@(State connpool _ _ _ _) key file dest met
=<< tryTakeMVar pidv
bracketIO noop (const cleanup) (const $ a feeder)
-copyFromRemoteCheap :: Remote -> State -> Key -> AssociatedFile -> FilePath -> Annex Bool
-copyFromRemoteCheap r st key af file = do
- repo <- getRepo r
- copyFromRemoteCheap' repo r st key af file
-
-copyFromRemoteCheap' :: Git.Repo -> Remote -> State -> Key -> AssociatedFile -> FilePath -> Annex Bool
+copyFromRemoteCheap :: Remote -> State -> Git.Repo -> Maybe (Key -> AssociatedFile -> FilePath -> Annex ())
+copyFromRemoteCheap r st repo
#ifndef mingw32_HOST_OS
-copyFromRemoteCheap' repo r st key af file
- | not $ Git.repoIsUrl repo = guardUsable repo (return False) $ do
+ | not $ Git.repoIsUrl repo = Just $ \key _af file -> guardUsable repo (giveup "cannot access remote") $ do
gc <- getGitConfigFromState st
loc <- liftIO $ gitAnnexLocation key repo gc
liftIO $ ifM (R.doesPathExist loc)
( do
absloc <- absPath (fromRawFilePath loc)
- catchBoolIO $ do
- createSymbolicLink absloc file
- return True
- , return False
+ createSymbolicLink absloc file
+ , giveup "remote does not contain key"
)
- | Git.repoIsSsh repo =
+ | Git.repoIsSsh repo = Just $ \key af file ->
ifM (Annex.Content.preseedTmp key file)
- ( fst <$> copyFromRemote' True r st key af file nullMeterUpdate
- , return False
+ ( void $ copyFromRemote' True r st key af file nullMeterUpdate
+ , giveup "cannot preseed rsync with existing content"
)
- | otherwise = return False
+ | otherwise = Nothing
#else
-copyFromRemoteCheap' _ _ _ _ _ _ = return False
+copyFromRemoteCheap' _ _ _ = Nothing
#endif
{- Tries to copy a key's content to a remote's annex. -}
-copyToRemote :: Remote -> State -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
+copyToRemote :: Remote -> State -> Key -> AssociatedFile -> MeterUpdate -> Annex ()
copyToRemote r st key file meterupdate = do
repo <- getRepo r
copyToRemote' repo r st key file meterupdate
-copyToRemote' :: Git.Repo -> Remote -> State -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
+copyToRemote' :: Git.Repo -> Remote -> State -> Key -> AssociatedFile -> MeterUpdate -> Annex ()
copyToRemote' repo r st@(State connpool duc _ _ _) key file meterupdate
| not $ Git.repoIsUrl repo = ifM duc
- ( guardUsable repo (return False) $ commitOnCleanup repo r st $
+ ( guardUsable repo (giveup "cannot access remote") $ commitOnCleanup repo r st $
copylocal =<< Annex.Content.prepSendAnnex key
- , return False
+ , giveup "remote does not have expected annex.uuid value"
)
| Git.repoIsSsh repo = commitOnCleanup repo r st $
P2PHelper.store
- (\p -> Ssh.runProto r connpool (return False) (copyremotefallback p))
+ (Ssh.runProto r connpool (return False) . copyremotefallback)
key file meterupdate
- | otherwise = do
- warning "copying to non-ssh repo not supported"
- return False
+ | otherwise = giveup "copying to non-ssh repo not supported"
where
- copylocal Nothing = return False
+ copylocal Nothing = giveup "content not available"
copylocal (Just (object, checksuccess)) = do
-- The checksuccess action is going to be run in
-- the remote's Annex, but it needs access to the local
@@ -670,7 +671,7 @@ copyToRemote' repo r st@(State connpool duc _ _ _) key file meterupdate
u <- getUUID
hardlink <- wantHardLink
-- run copy from perspective of remote
- onLocalFast st $ ifM (Annex.Content.inAnnex key)
+ res <- onLocalFast st $ ifM (Annex.Content.inAnnex key)
( return True
, runTransfer (Transfer Download u (fromKey id key)) file stdRetry $ \p -> do
copier <- mkCopier hardlink st params
@@ -682,7 +683,11 @@ copyToRemote' repo r st@(State connpool duc _ _ _) key file meterupdate
Annex.Content.saveState True
return res
)
- copyremotefallback p = Annex.Content.sendAnnex key noop $ \object -> do
+ unless res $
+ giveup "failed to send content to remote"
+ copyremotefallback p = either (const False) id
+ <$> tryNonAsync (copyremotefallback' p)
+ copyremotefallback' p = Annex.Content.sendAnnex key noop $ \object -> do
-- This is too broad really, but recvkey normally
-- verifies content anyway, so avoid complicating
-- it with a local sendAnnex check and rollback.
@@ -898,7 +903,7 @@ mkState r u gc = do
rv <- liftIO newEmptyMVar
let getrepo = ifM (liftIO $ isEmptyMVar rv)
( do
- r' <- tryGitConfigRead False r
+ r' <- tryGitConfigRead False r True
let t = (r', extractGitConfig FromGitConfig r')
void $ liftIO $ tryPutMVar rv t
return t
diff --git a/Remote/GitLFS.hs b/Remote/GitLFS.hs
index f80cc59..b6f6f9c 100644
--- a/Remote/GitLFS.hs
+++ b/Remote/GitLFS.hs
@@ -91,10 +91,10 @@ gen r u rc gc rs = do
{ chunkConfig = NoChunks
}
return $ Just $ specialRemote' specialcfg c
- (simplyPrepare $ store rs h)
- (simplyPrepare $ retrieve rs h)
- (simplyPrepare $ remove h)
- (simplyPrepare $ checkKey rs h)
+ (store rs h)
+ (retrieve rs h)
+ (remove h)
+ (checkKey rs h)
(this c cst)
where
this c cst = Remote
@@ -102,8 +102,8 @@ gen r u rc gc rs = do
, cost = cst
, name = Git.repoDescribe r
, storeKey = storeKeyDummy
- , retrieveKeyFile = retreiveKeyFileDummy
- , retrieveKeyFileCheap = retrieveCheap
+ , retrieveKeyFile = retrieveKeyFileDummy
+ , retrieveKeyFileCheap = Nothing
-- content stored on git-lfs is hashed with SHA256
-- no matter what git-annex key it's for, and the hash
-- is checked on download
@@ -134,34 +134,35 @@ gen r u rc gc rs = do
}
mySetup :: SetupStage -> Maybe UUID -> Maybe CredPair -> RemoteConfig -> RemoteGitConfig -> Annex (RemoteConfig, UUID)
-mySetup _ mu _ c gc = do
+mySetup ss mu _ c gc = do
u <- maybe (liftIO genUUID) return mu
(c', _encsetup) <- encryptionSetup c gc
pc <- either giveup return . parseRemoteConfig c' =<< configParser remote c'
+ let failinitunlessforced msg = case ss of
+ Init -> unlessM (Annex.getState Annex.force) (giveup msg)
+ Enable _ -> noop
case (isEncrypted pc, Git.GCrypt.urlPrefix `isPrefixOf` url) of
(False, False) -> noop
(True, True) -> Remote.GCrypt.setGcryptEncryption pc remotename
- (True, False) -> unlessM (Annex.getState Annex.force) $
- giveup $ unwords $
- [ "Encryption is enabled for this remote,"
- , "but only the files that git-annex stores on"
- , "it would be encrypted; "
- , "anything that git push sends to it would"
- , "not be encrypted. Recommend prefixing the"
- , "url with \"gcrypt::\" to also encrypt"
- , "git pushes."
- , "(Use --force if you want to use this"
- , "likely insecure configuration.)"
- ]
- (False, True) -> unlessM (Annex.getState Annex.force) $
- giveup $ unwords $
- [ "You used a \"gcrypt::\" url for this remote,"
- , "but encryption=none prevents git-annex"
- , "from encrypting files it stores there."
- , "(Use --force if you want to use this"
- , "likely insecure configuration.)"
- ]
+ (True, False) -> failinitunlessforced $ unwords $
+ [ "Encryption is enabled for this remote,"
+ , "but only the files that git-annex stores on"
+ , "it would be encrypted; "
+ , "anything that git push sends to it would"
+ , "not be encrypted. Recommend prefixing the"
+ , "url with \"gcrypt::\" to also encrypt"
+ , "git pushes."
+ , "(Use --force if you want to use this"
+ , "likely insecure configuration.)"
+ ]
+ (False, True) -> failinitunlessforced $ unwords
+ [ "You used a \"gcrypt::\" url for this remote,"
+ , "but encryption=none prevents git-annex"
+ , "from encrypting files it stores there."
+ , "(Use --force if you want to use this"
+ , "likely insecure configuration.)"
+ ]
-- Set up remote.name.url to point to the repo,
-- (so it's also usable by git as a non-special remote),
@@ -439,18 +440,15 @@ mkDownloadRequest rs k = case (extractKeySha256 k, extractKeySize k) of
store :: RemoteStateHandle -> TVar LFSHandle -> Storer
store rs h = fileStorer $ \k src p -> getLFSEndpoint LFS.RequestUpload h >>= \case
- Nothing -> return False
- Just endpoint -> flip catchNonAsync failederr $ do
+ Nothing -> giveup "unable to connect to git-lfs endpoint"
+ Just endpoint -> do
(req, sha256, size) <- mkUploadRequest rs k src
sendTransferRequest req endpoint >>= \case
- Left err -> do
- warning err
- return False
Right resp -> do
body <- liftIO $ httpBodyStorer src p
forM_ (LFS.objects resp) $
send body sha256 size
- return True
+ Left err -> giveup err
where
send body sha256 size tro
| LFS.resp_oid tro /= sha256 || LFS.resp_size tro /= size =
@@ -465,9 +463,6 @@ store rs h = fileStorer $ \k src p -> getLFSEndpoint LFS.RequestUpload h >>= \ca
Just [] -> noop -- server already has it
Just reqs -> forM_ reqs $
makeSmallAPIRequest . setRequestCheckStatus
- failederr e = do
- warning (show e)
- return False
retrieve :: RemoteStateHandle -> TVar LFSHandle -> Retriever
retrieve rs h = fileRetriever $ \dest k p -> getLFSEndpoint LFS.RequestDownload h >>= \case
@@ -530,10 +525,5 @@ checkKey rs h key = getLFSEndpoint LFS.RequestDownload h >>= \case
giveup "git-lfs server replied with other object than the one we requested"
| otherwise -> return True
-retrieveCheap :: Key -> AssociatedFile -> FilePath -> Annex Bool
-retrieveCheap _ _ _ = return False
-
remove :: TVar LFSHandle -> Remover
-remove _h _key = do
- warning "git-lfs does not support removing content"
- return False
+remove _h _key = giveup "git-lfs does not support removing content"
diff --git a/Remote/Glacier.hs b/Remote/Glacier.hs
index 4c758d6..dfba203 100644
--- a/Remote/Glacier.hs
+++ b/Remote/Glacier.hs
@@ -65,10 +65,10 @@ gen r u rc gc rs = new
<*> remoteCost gc veryExpensiveRemoteCost
where
new c cst = Just $ specialRemote' specialcfg c
- (prepareStore this)
- (prepareRetrieve this)
- (simplyPrepare $ remove this)
- (simplyPrepare $ checkKey this)
+ (store this)
+ (retrieve this)
+ (remove this)
+ (checkKey this)
this
where
this = Remote
@@ -76,8 +76,8 @@ gen r u rc gc rs = new
, cost = cst
, name = Git.repoDescribe r
, storeKey = storeKeyDummy
- , retrieveKeyFile = retreiveKeyFileDummy
- , retrieveKeyFileCheap = retrieveCheap this
+ , retrieveKeyFile = retrieveKeyFileDummy
+ , retrieveKeyFileCheap = Nothing
-- glacier-cli does not follow redirects and does
-- not support file://, as far as we know, but
-- there's no guarantee that will continue to be
@@ -118,16 +118,17 @@ glacierSetup ss mu mcreds c gc = do
glacierSetup' ss u mcreds c gc
glacierSetup' :: SetupStage -> UUID -> Maybe CredPair -> RemoteConfig -> RemoteGitConfig -> Annex (RemoteConfig, UUID)
glacierSetup' ss u mcreds c gc = do
- (c', encsetup) <- encryptionSetup c gc
- c'' <- setRemoteCredPair encsetup c' gc (AWS.creds u) mcreds
- let fullconfig = c'' `M.union` defaults
- pc <- either giveup return . parseRemoteConfig fullconfig
- =<< configParser remote fullconfig
+ (c', encsetup) <- encryptionSetup (c `M.union` defaults) gc
+ pc <- either giveup return . parseRemoteConfig c'
+ =<< configParser remote c'
+ c'' <- setRemoteCredPair encsetup pc gc (AWS.creds u) mcreds
+ pc' <- either giveup return . parseRemoteConfig c''
+ =<< configParser remote c''
case ss of
- Init -> genVault pc gc u
+ Init -> genVault pc' gc u
_ -> return ()
- gitConfigSpecialRemote u fullconfig [("glacier", "true")]
- return (fullconfig, u)
+ gitConfigSpecialRemote u c'' [("glacier", "true")]
+ return (c'', u)
where
remotename = fromJust (lookupName c)
defvault = remotename ++ "-" ++ fromUUID u
@@ -136,18 +137,19 @@ glacierSetup' ss u mcreds c gc = do
, (vaultField, Proposed defvault)
]
-prepareStore :: Remote -> Preparer Storer
-prepareStore r = checkPrepare nonEmpty (byteStorer $ store r)
+store :: Remote -> Storer
+store r k b p = do
+ checkNonEmpty k
+ byteStorer (store' r) k b p
-nonEmpty :: Key -> Annex Bool
-nonEmpty k
- | fromKey keySize k == Just 0 = do
- warning "Cannot store empty files in Glacier."
- return False
- | otherwise = return True
+checkNonEmpty :: Key -> Annex ()
+checkNonEmpty k
+ | fromKey keySize k == Just 0 =
+ giveup "Cannot store empty files in Glacier."
+ | otherwise = return ()
-store :: Remote -> Key -> L.ByteString -> MeterUpdate -> Annex Bool
-store r k b p = go =<< glacierEnv c gc u
+store' :: Remote -> Key -> L.ByteString -> MeterUpdate -> Annex ()
+store' r k b p = go =<< glacierEnv c gc u
where
c = config r
gc = gitconfig r
@@ -159,19 +161,17 @@ store r k b p = go =<< glacierEnv c gc u
, Param $ getVault $ config r
, Param "-"
]
- go Nothing = return False
- go (Just e) = do
+ go Nothing = giveup "Glacier not usable."
+ go (Just e) = liftIO $ do
let cmd = (proc "glacier" (toCommand params)) { env = Just e }
- liftIO $ catchBoolIO $
- withHandle StdinHandle createProcessSuccess cmd $ \h -> do
- meteredWrite p h b
- return True
+ withHandle StdinHandle createProcessSuccess cmd $ \h ->
+ meteredWrite p h b
-prepareRetrieve :: Remote -> Preparer Retriever
-prepareRetrieve = simplyPrepare . byteRetriever . retrieve
+retrieve :: Remote -> Retriever
+retrieve = byteRetriever . retrieve'
-retrieve :: Remote -> Key -> (L.ByteString -> Annex Bool) -> Annex Bool
-retrieve r k sink = go =<< glacierEnv c gc u
+retrieve' :: Remote -> Key -> (L.ByteString -> Annex ()) -> Annex ()
+retrieve' r k sink = go =<< glacierEnv c gc u
where
c = config r
gc = gitconfig r
@@ -184,35 +184,33 @@ retrieve r k sink = go =<< glacierEnv c gc u
, Param $ archive r k
]
go Nothing = giveup "cannot retrieve from glacier"
- go (Just e) = do
+ go (Just environ) = do
let cmd = (proc "glacier" (toCommand params))
- { env = Just e
+ { env = Just environ
, std_out = CreatePipe
}
(_, Just h, _, pid) <- liftIO $ createProcess cmd
- -- Glacier cannot store empty files, so if the output is
- -- empty, the content is not available yet.
- ok <- ifM (liftIO $ hIsEOF h)
- ( return False
- , sink =<< liftIO (L.hGetContents h)
- )
- liftIO $ hClose h
- liftIO $ forceSuccessProcess cmd pid
- unless ok $ do
- showLongNote "Recommend you wait up to 4 hours, and then run this command again."
- return ok
-
-retrieveCheap :: Remote -> Key -> AssociatedFile -> FilePath -> Annex Bool
-retrieveCheap _ _ _ _ = return False
+ let cleanup = liftIO $ do
+ hClose h
+ forceSuccessProcess cmd pid
+ flip finally cleanup $ do
+ -- Glacier cannot store empty files, so if
+ -- the output is empty, the content is not
+ -- available yet.
+ whenM (liftIO $ hIsEOF h) $
+ giveup "Content is not available from glacier yet. Recommend you wait up to 4 hours, and then run this command again."
+ sink =<< liftIO (L.hGetContents h)
remove :: Remote -> Remover
-remove r k = glacierAction r
- [ Param "archive"
-
- , Param "delete"
- , Param $ getVault $ config r
- , Param $ archive r k
- ]
+remove r k = unlessM go $
+ giveup "removal from glacier failed"
+ where
+ go = glacierAction r
+ [ Param "archive"
+ , Param "delete"
+ , Param $ getVault $ config r
+ , Param $ archive r k
+ ]
checkKey :: Remote -> CheckPresent
checkKey r k = do
diff --git a/Remote/Helper/Chunked.hs b/Remote/Helper/Chunked.hs
index 5dfd299..9a6b363 100644
--- a/Remote/Helper/Chunked.hs
+++ b/Remote/Helper/Chunked.hs
@@ -117,28 +117,22 @@ storeChunks
-> MeterUpdate
-> Storer
-> CheckPresent
- -> Annex Bool
+ -> Annex ()
storeChunks u chunkconfig encryptor k f p storer checker =
case chunkconfig of
- (UnpaddedChunks chunksize) | isStableKey k ->
- bracketIO open close (go chunksize)
+ (UnpaddedChunks chunksize) | isStableKey k -> do
+ h <- liftIO $ openBinaryFile f ReadMode
+ go chunksize h
+ liftIO $ hClose h
_ -> storer k (FileContent f) p
where
- open = tryIO $ openBinaryFile f ReadMode
-
- close (Right h) = hClose h
- close (Left _) = noop
-
- go _ (Left e) = do
- warning (show e)
- return False
- go chunksize (Right h) = do
+ go chunksize h = do
let chunkkeys = chunkKeyStream k chunksize
(chunkkeys', startpos) <- seekResume h encryptor chunkkeys checker
b <- liftIO $ L.hGetContents h
gochunks p startpos chunksize b chunkkeys'
- gochunks :: MeterUpdate -> BytesProcessed -> ChunkSize -> L.ByteString -> ChunkKeyStream -> Annex Bool
+ gochunks :: MeterUpdate -> BytesProcessed -> ChunkSize -> L.ByteString -> ChunkKeyStream -> Annex ()
gochunks meterupdate startpos chunksize = loop startpos . splitchunk
where
splitchunk = L.splitAt chunksize
@@ -148,16 +142,12 @@ storeChunks u chunkconfig encryptor k f p storer checker =
-- Once all chunks are successfully
-- stored, update the chunk log.
chunksStored u k (FixedSizeChunks chunksize) numchunks
- return True
| otherwise = do
liftIO $ meterupdate' zeroBytesProcessed
let (chunkkey, chunkkeys') = nextChunkKeyStream chunkkeys
- ifM (storer chunkkey (ByteContent chunk) meterupdate')
- ( do
- let bytesprocessed' = addBytesProcessed bytesprocessed (L.length chunk)
- loop bytesprocessed' (splitchunk bs) chunkkeys'
- , return False
- )
+ storer chunkkey (ByteContent chunk) meterupdate'
+ let bytesprocessed' = addBytesProcessed bytesprocessed (L.length chunk)
+ loop bytesprocessed' (splitchunk bs) chunkkeys'
where
numchunks = numChunks chunkkeys
{- The MeterUpdate that is passed to the action
@@ -209,19 +199,14 @@ seekResume h encryptor chunkkeys checker = do
{- Removes all chunks of a key from a remote, by calling a remover
- action on each.
-
- - The remover action should succeed even if asked to
- - remove a key that is not present on the remote.
- -
- This action may be called on a chunked key. It will simply remove it.
-}
-removeChunks :: (Key -> Annex Bool) -> UUID -> ChunkConfig -> EncKey -> Key -> Annex Bool
+removeChunks :: Remover -> UUID -> ChunkConfig -> EncKey -> Key -> Annex ()
removeChunks remover u chunkconfig encryptor k = do
ls <- chunkKeys u chunkconfig k
- ok <- allM (remover . encryptor) (concat ls)
- when ok $ do
- let chunksizes = catMaybes $ map (fromKey keyChunkSize <=< headMaybe) ls
- forM_ chunksizes $ chunksRemoved u k . FixedSizeChunks . fromIntegral
- return ok
+ mapM_ (remover . encryptor) (concat ls)
+ let chunksizes = catMaybes $ map (fromKey keyChunkSize <=< headMaybe) ls
+ forM_ chunksizes $ chunksRemoved u k . FixedSizeChunks . fromIntegral
{- Retrieves a key from a remote, using a retriever action.
-
@@ -231,8 +216,8 @@ removeChunks remover u chunkconfig encryptor k = do
- other chunks in the list is fed to the sink.
-
- If retrival of one of the subsequent chunks throws an exception,
- - gives up and returns False. Note that partial data may have been
- - written to the sink in this case.
+ - gives up. Note that partial data may have been written to the sink
+ - in this case.
-
- Resuming is supported when using chunks. When the destination file
- already exists, it skips to the next chunked key that would be needed
@@ -246,33 +231,30 @@ retrieveChunks
-> Key
-> FilePath
-> MeterUpdate
- -> (Maybe Handle -> Maybe MeterUpdate -> ContentSource -> Annex Bool)
- -> Annex Bool
+ -> (Maybe Handle -> Maybe MeterUpdate -> ContentSource -> Annex ())
+ -> Annex ()
retrieveChunks retriever u chunkconfig encryptor basek dest basep sink
| noChunks chunkconfig =
-- Optimisation: Try the unchunked key first, to avoid
-- looking in the git-annex branch for chunk counts
-- that are likely not there.
getunchunked `catchNonAsync`
- const (go =<< chunkKeysOnly u basek)
- | otherwise = go =<< chunkKeys u chunkconfig basek
+ (\e -> go (Just e) =<< chunkKeysOnly u basek)
+ | otherwise = go Nothing =<< chunkKeys u chunkconfig basek
where
- go ls = do
+ go pe ls = do
currsize <- liftIO $ catchMaybeIO $ getFileSize dest
let ls' = maybe ls (setupResume ls) currsize
if any null ls'
- then return True -- dest is already complete
- else firstavail currsize ls' `catchNonAsync` unable
-
- unable e = do
- warning (show e)
- return False
+ then noop -- dest is already complete
+ else firstavail pe currsize ls'
- firstavail _ [] = return False
- firstavail currsize ([]:ls) = firstavail currsize ls
- firstavail currsize ((k:ks):ls)
+ firstavail Nothing _ [] = giveup "unable to determine the chunks to use for this remote"
+ firstavail (Just e) _ [] = throwM e
+ firstavail pe currsize ([]:ls) = firstavail pe currsize ls
+ firstavail _ currsize ((k:ks):ls)
| k == basek = getunchunked
- `catchNonAsync` (const $ firstavail currsize ls)
+ `catchNonAsync` (\e -> firstavail (Just e) currsize ls)
| otherwise = do
let offset = resumeOffset currsize k
let p = maybe basep
@@ -281,25 +263,22 @@ retrieveChunks retriever u chunkconfig encryptor basek dest basep sink
v <- tryNonAsync $
retriever (encryptor k) p $ \content ->
bracketIO (maybe opennew openresume offset) hClose $ \h -> do
- void $ tosink (Just h) p content
+ tosink (Just h) p content
let sz = toBytesProcessed $
fromMaybe 0 $ fromKey keyChunkSize k
getrest p h sz sz ks
- `catchNonAsync` unable
case v of
Left e
- | null ls -> unable e
- | otherwise -> firstavail currsize ls
+ | null ls -> throwM e
+ | otherwise -> firstavail (Just e) currsize ls
Right r -> return r
- getrest _ _ _ _ [] = return True
+ getrest _ _ _ _ [] = noop
getrest p h sz bytesprocessed (k:ks) = do
let p' = offsetMeterUpdate p bytesprocessed
liftIO $ p' zeroBytesProcessed
- ifM (retriever (encryptor k) p' $ tosink (Just h) p')
- ( getrest p h sz (addBytesProcessed bytesprocessed sz) ks
- , unable "chunk retrieval failed"
- )
+ retriever (encryptor k) p' $ tosink (Just h) p'
+ getrest p h sz (addBytesProcessed bytesprocessed sz) ks
getunchunked = retriever (encryptor basek) basep $ tosink Nothing basep
diff --git a/Remote/Helper/Chunked/Legacy.hs b/Remote/Helper/Chunked/Legacy.hs
index e7a7c5f..439620d 100644
--- a/Remote/Helper/Chunked/Legacy.hs
+++ b/Remote/Helper/Chunked/Legacy.hs
@@ -63,20 +63,15 @@ probeChunks basedest check = go [] $ map (basedest ++) chunkStream
- finalizer is called to rename the tmp into the dest
- (and do any other cleanup).
-}
-storeChunks :: Key -> FilePath -> FilePath -> ([FilePath] -> IO [FilePath]) -> (FilePath -> String -> IO ()) -> (FilePath -> FilePath -> IO ()) -> IO Bool
-storeChunks key tmp dest storer recorder finalizer = either onerr return
- =<< (E.try go :: IO (Either E.SomeException Bool))
+storeChunks :: Key -> FilePath -> FilePath -> ([FilePath] -> IO [FilePath]) -> (FilePath -> String -> IO ()) -> (FilePath -> FilePath -> IO ()) -> IO ()
+storeChunks key tmp dest storer recorder finalizer = do
+ stored <- storer tmpdests
+ let chunkcount = basef ++ chunkCount
+ recorder chunkcount (show $ length stored)
+ finalizer tmp dest
+ when (null stored) $
+ giveup "no chunks were stored"
where
- go = do
- stored <- storer tmpdests
- let chunkcount = basef ++ chunkCount
- recorder chunkcount (show $ length stored)
- finalizer tmp dest
- return (not $ null stored)
- onerr e = do
- warningIO (show e)
- return False
-
basef = tmp ++ fromRawFilePath (keyFile key)
tmpdests = map (basef ++ ) chunkStream
diff --git a/Remote/Helper/ExportImport.hs b/Remote/Helper/ExportImport.hs
index c5b7fbf..fdc3d2e 100644
--- a/Remote/Helper/ExportImport.hs
+++ b/Remote/Helper/ExportImport.hs
@@ -36,15 +36,15 @@ instance HasExportUnsupported (ParsedRemoteConfig -> RemoteGitConfig -> Annex Bo
instance HasExportUnsupported (ExportActions Annex) where
exportUnsupported = ExportActions
- { storeExport = \_ _ _ _ -> do
- warning "store export is unsupported"
- return False
- , retrieveExport = \_ _ _ _ -> return False
+ { storeExport = nope
+ , retrieveExport = nope
, checkPresentExport = \_ _ -> return False
- , removeExport = \_ _ -> return False
- , removeExportDirectory = Just $ \_ -> return False
+ , removeExport = nope
+ , removeExportDirectory = nope
, renameExport = \_ _ _ -> return Nothing
}
+ where
+ nope = giveup "export not supported"
-- | Use for remotes that do not support imports.
class HasImportUnsupported a where
@@ -56,12 +56,14 @@ instance HasImportUnsupported (ParsedRemoteConfig -> RemoteGitConfig -> Annex Bo
instance HasImportUnsupported (ImportActions Annex) where
importUnsupported = ImportActions
{ listImportableContents = return Nothing
- , retrieveExportWithContentIdentifier = \_ _ _ _ _ -> return Nothing
- , storeExportWithContentIdentifier = \_ _ _ _ _ -> return (Left "import not supported")
- , removeExportWithContentIdentifier = \_ _ _ -> return False
- , removeExportDirectoryWhenEmpty = Just $ \_ -> return False
+ , retrieveExportWithContentIdentifier = nope
+ , storeExportWithContentIdentifier = nope
+ , removeExportWithContentIdentifier = nope
+ , removeExportDirectoryWhenEmpty = nope
, checkPresentExportWithContentIdentifier = \_ _ _ -> return False
}
+ where
+ nope = giveup "import not supported"
exportIsSupported :: ParsedRemoteConfig -> RemoteGitConfig -> Annex Bool
exportIsSupported = \_ _ -> return True
@@ -151,16 +153,11 @@ adjustExportImport r rs = case getRemoteConfigValue exportTreeField (config r) o
oldks <- liftIO $ Export.getExportTreeKey exportdb loc
oldcids <- liftIO $ concat
<$> mapM (ContentIdentifier.getContentIdentifiers db rs) oldks
- storeExportWithContentIdentifier (importActions r') f k loc oldcids p >>= \case
- Left err -> do
- warning err
- return False
- Right newcid -> do
- withExclusiveLock gitAnnexContentIdentifierLock $ do
- liftIO $ ContentIdentifier.recordContentIdentifier db rs newcid k
- liftIO $ ContentIdentifier.flushDbQueue db
- recordContentIdentifier rs newcid k
- return True
+ newcid <- storeExportWithContentIdentifier (importActions r') f k loc oldcids p
+ withExclusiveLock gitAnnexContentIdentifierLock $ do
+ liftIO $ ContentIdentifier.recordContentIdentifier db rs newcid k
+ liftIO $ ContentIdentifier.flushDbQueue db
+ recordContentIdentifier rs newcid k
, removeExport = \k loc ->
removeExportWithContentIdentifier (importActions r') k loc
=<< keycids k
@@ -188,9 +185,8 @@ adjustExportImport r rs = case getRemoteConfigValue exportTreeField (config r) o
-- when another repository has already stored the
-- key, and the local repository does not know
-- about it. To avoid unnecessary costs, don't do it.
- { storeKey = \_ _ _ -> do
- warning "remote is configured with exporttree=yes; use `git-annex export` to store content on it"
- return False
+ { storeKey = \_ _ _ ->
+ giveup "remote is configured with exporttree=yes; use `git-annex export` to store content on it"
-- Keys can be retrieved using retrieveExport,
-- but since that retrieves from a path in the
-- remote that another writer could have replaced
@@ -203,24 +199,19 @@ adjustExportImport r rs = case getRemoteConfigValue exportTreeField (config r) o
, retrieveKeyFile = \k af dest p ->
let retrieveexport = retrieveKeyFileFromExport dbv k af dest p
in if appendonly r
- then do
- ret@(ok, _v) <- retrieveKeyFile r k af dest p
- if ok
- then return ret
- else retrieveexport
+ then retrieveKeyFile r k af dest p
+ `catchNonAsync` const retrieveexport
else retrieveexport
, retrieveKeyFileCheap = if appendonly r
then retrieveKeyFileCheap r
- else \_ _ _ -> return False
+ else Nothing
-- Removing a key from an export would need to
-- change the tree in the export log to not include
-- the file. Otherwise, conflicts when removing
-- files would not be dealt with correctly.
-- There does not seem to be a good use case for
-- removing a key from an export in any case.
- , removeKey = \_k -> do
- warning "dropping content from an export is not supported; use `git annex export` to export a tree that lacks the files you want to remove"
- return False
+ , removeKey = \_k -> giveup "dropping content from an export is not supported; use `git annex export` to export a tree that lacks the files you want to remove"
-- Can't lock content on exports, since they're
-- not key/value stores, and someone else could
-- change what's exported to a file at any time.
@@ -319,18 +310,15 @@ adjustExportImport r rs = case getRemoteConfigValue exportTreeField (config r) o
db <- getexportdb dbv
liftIO $ Export.getExportTree db k
- retrieveKeyFileFromExport dbv k _af dest p = unVerified $
- if maybe False (isJust . verifyKeyContent) (maybeLookupBackendVariety (fromKey keyVariety k))
- then do
- locs <- getexportlocs dbv k
- case locs of
- [] -> do
- ifM (liftIO $ atomically $ readTVar $ getexportinconflict dbv)
- ( warning "unknown export location, likely due to the export conflict"
- , warning "unknown export location"
- )
- return False
- (l:_) -> retrieveExport (exportActions r) k l dest p
- else do
- warning $ "exported content cannot be verified due to using the " ++ decodeBS (formatKeyVariety (fromKey keyVariety k)) ++ " backend"
- return False
+ retrieveKeyFileFromExport dbv k _af dest p
+ | maybe False (isJust . verifyKeyContent) (maybeLookupBackendVariety (fromKey keyVariety k)) = do
+ locs <- getexportlocs dbv k
+ case locs of
+ [] -> ifM (liftIO $ atomically $ readTVar $ getexportinconflict dbv)
+ ( giveup "unknown export location, likely due to the export conflict"
+ , giveup "unknown export location"
+ )
+ (l:_) -> do
+ retrieveExport (exportActions r) k l dest p
+ return UnVerified
+ | otherwise = giveup $ "exported content cannot be verified due to using the " ++ decodeBS (formatKeyVariety (fromKey keyVariety k)) ++ " backend"
diff --git a/Remote/Helper/Hooks.hs b/Remote/Helper/Hooks.hs
index 2b455f1..e6b061e 100644
--- a/Remote/Helper/Hooks.hs
+++ b/Remote/Helper/Hooks.hs
@@ -34,7 +34,9 @@ addHooks' r starthook stophook = r'
r' = r
{ storeKey = \k f p -> wrapper $ storeKey r k f p
, retrieveKeyFile = \k f d p -> wrapper $ retrieveKeyFile r k f d p
- , retrieveKeyFileCheap = \k af f -> wrapper $ retrieveKeyFileCheap r k af f
+ , retrieveKeyFileCheap = case retrieveKeyFileCheap r of
+ Just a -> Just $ \k af f -> wrapper $ a k af f
+ Nothing -> Nothing
, removeKey = wrapper . removeKey r
, checkPresent = wrapper . checkPresent r
}
diff --git a/Remote/Helper/Http.hs b/Remote/Helper/Http.hs
index 3d91a00..1b8a7c3 100644
--- a/Remote/Helper/Http.hs
+++ b/Remote/Helper/Http.hs
@@ -25,7 +25,7 @@ import Network.HTTP.Types
--
-- Implemented as a fileStorer, so that the content can be streamed
-- from the file in constant space.
-httpStorer :: (Key -> RequestBody -> Annex Bool) -> Storer
+httpStorer :: (Key -> RequestBody -> Annex ()) -> Storer
httpStorer a = fileStorer $ \k f m -> a k =<< liftIO (httpBodyStorer f m)
-- Reads the file and generates a streaming request body, that will update
diff --git a/Remote/Helper/P2P.hs b/Remote/Helper/P2P.hs
index 94ce0ce..1bd7b9a 100644
--- a/Remote/Helper/P2P.hs
+++ b/Remote/Helper/P2P.hs
@@ -1,6 +1,6 @@
{- Helpers for remotes using the git-annex P2P protocol.
-
- - Copyright 2016-2018 Joey Hess <id@joeyh.name>
+ - Copyright 2016-2020 Joey Hess <id@joeyh.name>
-
- Licensed under the GNU AGPL version 3 or higher.
-}
@@ -30,26 +30,31 @@ type ProtoConnRunner c = forall a. P2P.Proto a -> ClosableConnection c -> Annex
-- the pool when done.
type WithConn a c = (ClosableConnection c -> Annex (ClosableConnection c, a)) -> Annex a
-store :: (MeterUpdate -> ProtoRunner Bool) -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
+store :: (MeterUpdate -> ProtoRunner Bool) -> Key -> AssociatedFile -> MeterUpdate -> Annex ()
store runner k af p = do
let sizer = KeySizer k (fmap fst <$> prepSendAnnex k)
- metered (Just p) sizer $ \_ p' ->
- fromMaybe False
- <$> runner p' (P2P.put k af p')
+ metered (Just p) sizer $ \_ p' ->
+ runner p' (P2P.put k af p') >>= \case
+ Just True -> return ()
+ Just False -> giveup "transfer failed"
+ Nothing -> remoteUnavail
-retrieve :: (MeterUpdate -> ProtoRunner (Bool, Verification)) -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
+retrieve :: (MeterUpdate -> ProtoRunner (Bool, Verification)) -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex Verification
retrieve runner k af dest p =
metered (Just p) k $ \m p' ->
- fromMaybe (False, UnVerified)
- <$> runner p' (P2P.get dest k af m p')
+ runner p' (P2P.get dest k af m p') >>= \case
+ Just (True, v) -> return v
+ Just (False, _) -> giveup "transfer failed"
+ Nothing -> remoteUnavail
-remove :: ProtoRunner Bool -> Key -> Annex Bool
-remove runner k = fromMaybe False <$> runner (P2P.remove k)
+remove :: ProtoRunner Bool -> Key -> Annex ()
+remove runner k = runner (P2P.remove k) >>= \case
+ Just True -> return ()
+ Just False -> giveup "removing content from remote failed"
+ Nothing -> remoteUnavail
checkpresent :: ProtoRunner Bool -> Key -> Annex Bool
-checkpresent runner k = maybe unavail return =<< runner (P2P.checkPresent k)
- where
- unavail = giveup "can't connect to remote"
+checkpresent runner k = maybe remoteUnavail return =<< runner (P2P.checkPresent k)
lock :: WithConn a c -> ProtoConnRunner c -> UUID -> Key -> (VerifiedCopy -> Annex a) -> Annex a
lock withconn connrunner u k callback = withconn $ \conn -> do
@@ -65,3 +70,6 @@ lock withconn connrunner u k callback = withconn $ \conn -> do
where
go False = giveup "can't lock content"
go True = withVerifiedCopy LockedCopy u (return True) callback
+
+remoteUnavail :: a
+remoteUnavail = giveup "can't connect to remote"
diff --git a/Remote/Helper/ReadOnly.hs b/Remote/Helper/ReadOnly.hs
index aad37b9..71e31fd 100644
--- a/Remote/Helper/ReadOnly.hs
+++ b/Remote/Helper/ReadOnly.hs
@@ -44,38 +44,35 @@ adjustReadOnly r
}
| otherwise = r
-readonlyStoreKey :: Key -> AssociatedFile -> MeterUpdate -> Annex Bool
+readonlyStoreKey :: Key -> AssociatedFile -> MeterUpdate -> Annex ()
readonlyStoreKey _ _ _ = readonlyFail
-readonlyRemoveKey :: Key -> Annex Bool
+readonlyRemoveKey :: Key -> Annex ()
readonlyRemoveKey _ = readonlyFail
readonlyStorer :: Storer
readonlyStorer _ _ _ = readonlyFail
-readonlyStoreExport :: FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex Bool
+readonlyStoreExport :: FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
readonlyStoreExport _ _ _ _ = readonlyFail
-readonlyRemoveExport :: Key -> ExportLocation -> Annex Bool
+readonlyRemoveExport :: Key -> ExportLocation -> Annex ()
readonlyRemoveExport _ _ = readonlyFail
-readonlyRemoveExportDirectory :: ExportDirectory -> Annex Bool
+readonlyRemoveExportDirectory :: ExportDirectory -> Annex ()
readonlyRemoveExportDirectory _ = readonlyFail
-readonlyRenameExport :: Key -> ExportLocation -> ExportLocation -> Annex (Maybe Bool)
+readonlyRenameExport :: Key -> ExportLocation -> ExportLocation -> Annex (Maybe ())
readonlyRenameExport _ _ _ = return Nothing
-readonlyStoreExportWithContentIdentifier :: FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex (Either String ContentIdentifier)
-readonlyStoreExportWithContentIdentifier _ _ _ _ _ =
- return $ Left readonlyWarning
+readonlyStoreExportWithContentIdentifier :: FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
+readonlyStoreExportWithContentIdentifier _ _ _ _ _ = readonlyFail
-readonlyRemoveExportWithContentIdentifier :: Key -> ExportLocation -> [ContentIdentifier] -> Annex Bool
+readonlyRemoveExportWithContentIdentifier :: Key -> ExportLocation -> [ContentIdentifier] -> Annex ()
readonlyRemoveExportWithContentIdentifier _ _ _ = readonlyFail
-readonlyFail :: Annex Bool
-readonlyFail = do
- warning readonlyWarning
- return False
+readonlyFail :: Annex a
+readonlyFail = giveup readonlyWarning
readonlyWarning :: String
readonlyWarning = "this remote is readonly"
diff --git a/Remote/Helper/Special.hs b/Remote/Helper/Special.hs
index a8b945c..18d0988 100644
--- a/Remote/Helper/Special.hs
+++ b/Remote/Helper/Special.hs
@@ -11,21 +11,17 @@ module Remote.Helper.Special (
findSpecialRemotes,
gitConfigSpecialRemote,
mkRetrievalVerifiableKeysSecure,
- Preparer,
Storer,
Retriever,
Remover,
CheckPresent,
- simplyPrepare,
ContentSource,
- checkPrepare,
- resourcePrepare,
fileStorer,
byteStorer,
fileRetriever,
byteRetriever,
storeKeyDummy,
- retreiveKeyFileDummy,
+ retrieveKeyFileDummy,
removeKeyDummy,
checkPresentDummy,
SpecialRemoteCfg(..),
@@ -50,7 +46,6 @@ import Config.Cost
import Utility.Metered
import Remote.Helper.Chunked as X
import Remote.Helper.Encryptable as X
-import Remote.Helper.Messages
import Annex.Content
import Messages.Progress
import qualified Git
@@ -93,25 +88,9 @@ mkRetrievalVerifiableKeysSecure gc
| remoteAnnexAllowUnverifiedDownloads gc = RetrievalAllKeysSecure
| otherwise = RetrievalVerifiableKeysSecure
--- Use when nothing needs to be done to prepare a helper.
-simplyPrepare :: helper -> Preparer helper
-simplyPrepare helper _ a = a $ Just helper
-
--- Use to run a check when preparing a helper.
-checkPrepare :: (Key -> Annex Bool) -> helper -> Preparer helper
-checkPrepare checker helper k a = ifM (checker k)
- ( a (Just helper)
- , a Nothing
- )
-
--- Use to acquire a resource when preparing a helper.
-resourcePrepare :: (Key -> (r -> Annex Bool) -> Annex Bool) -> (r -> helper) -> Preparer helper
-resourcePrepare withr helper k a = withr k $ \r ->
- a (Just (helper r))
-
-- A Storer that expects to be provided with a file containing
-- the content of the key to store.
-fileStorer :: (Key -> FilePath -> MeterUpdate -> Annex Bool) -> Storer
+fileStorer :: (Key -> FilePath -> MeterUpdate -> Annex ()) -> Storer
fileStorer a k (FileContent f) m = a k f m
fileStorer a k (ByteContent b) m = withTmp k $ \f -> do
liftIO $ L.writeFile f b
@@ -119,7 +98,7 @@ fileStorer a k (ByteContent b) m = withTmp k $ \f -> do
-- A Storer that expects to be provided with a L.ByteString of
-- the content to store.
-byteStorer :: (Key -> L.ByteString -> MeterUpdate -> Annex Bool) -> Storer
+byteStorer :: (Key -> L.ByteString -> MeterUpdate -> Annex ()) -> Storer
byteStorer a k c m = withBytes c $ \b -> a k b m
-- A Retriever that writes the content of a Key to a provided file.
@@ -133,7 +112,7 @@ fileRetriever a k m callback = do
-- A Retriever that generates a lazy ByteString containing the Key's
-- content, and passes it to a callback action which will fully consume it
-- before returning.
-byteRetriever :: (Key -> (L.ByteString -> Annex Bool) -> Annex Bool) -> Retriever
+byteRetriever :: (Key -> (L.ByteString -> Annex ()) -> Annex ()) -> Retriever
byteRetriever a k _m callback = a k (callback . ByteContent)
{- The base Remote that is provided to specialRemote needs to have
@@ -141,21 +120,21 @@ byteRetriever a k _m callback = a k (callback . ByteContent)
- but they are never actually used (since specialRemote replaces them).
- Here are some dummy ones.
-}
-storeKeyDummy :: Key -> AssociatedFile -> MeterUpdate -> Annex Bool
-storeKeyDummy _ _ _ = return False
-retreiveKeyFileDummy :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
-retreiveKeyFileDummy _ _ _ _ = unVerified (return False)
-removeKeyDummy :: Key -> Annex Bool
-removeKeyDummy _ = return False
+storeKeyDummy :: Key -> AssociatedFile -> MeterUpdate -> Annex ()
+storeKeyDummy _ _ _ = error "missing storeKey implementation"
+retrieveKeyFileDummy :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex Verification
+retrieveKeyFileDummy _ _ _ _ = error "missing retrieveKeyFile implementation"
+removeKeyDummy :: Key -> Annex ()
+removeKeyDummy _ = error "missing removeKey implementation"
checkPresentDummy :: Key -> Annex Bool
checkPresentDummy _ = error "missing checkPresent implementation"
type RemoteModifier
= ParsedRemoteConfig
- -> Preparer Storer
- -> Preparer Retriever
- -> Preparer Remover
- -> Preparer CheckPresent
+ -> Storer
+ -> Retriever
+ -> Remover
+ -> CheckPresent
-> Remote
-> Remote
@@ -185,15 +164,17 @@ specialRemote :: RemoteModifier
specialRemote c = specialRemote' (specialRemoteCfg c) c
specialRemote' :: SpecialRemoteCfg -> RemoteModifier
-specialRemote' cfg c preparestorer prepareretriever prepareremover preparecheckpresent baser = encr
+specialRemote' cfg c storer retriever remover checkpresent baser = encr
where
encr = baser
{ storeKey = \k _f p -> cip >>= storeKeyGen k p
- , retrieveKeyFile = \k _f d p -> cip >>= unVerified . retrieveKeyFileGen k d p
- , retrieveKeyFileCheap = \k f d -> cip >>= maybe
- (retrieveKeyFileCheap baser k f d)
- -- retrieval of encrypted keys is never cheap
- (\_ -> return False)
+ , retrieveKeyFile = \k _f d p -> cip >>= retrieveKeyFileGen k d p
+ , retrieveKeyFileCheap = case retrieveKeyFileCheap baser of
+ Nothing -> Nothing
+ Just a
+ -- retrieval of encrypted keys is never cheap
+ | isencrypted -> Nothing
+ | otherwise -> Just $ \k f d -> a k f d
-- When encryption is used, the remote could provide
-- some other content encrypted by the user, and trick
-- git-annex into decrypting it, leaking the decryption
@@ -226,24 +207,18 @@ specialRemote' cfg c preparestorer prepareretriever prepareremover preparecheckp
cip = cipherKey c (gitconfig baser)
isencrypted = isEncrypted c
- safely a = catchNonAsync a (\e -> warning (show e) >> return False)
-
-- chunk, then encrypt, then feed to the storer
- storeKeyGen k p enc = safely $ preparestorer k $ safely . go
+ storeKeyGen k p enc = sendAnnex k rollback $ \src ->
+ displayprogress p k (Just src) $ \p' ->
+ storeChunks (uuid baser) chunkconfig enck k src p'
+ (storechunk enc)
+ checkpresent
where
- go (Just storer) = preparecheckpresent k $ safely . go' storer
- go Nothing = return False
- go' storer (Just checker) = sendAnnex k rollback $ \src ->
- displayprogress p k (Just src) $ \p' ->
- storeChunks (uuid baser) chunkconfig enck k src p'
- (storechunk enc storer)
- checker
- go' _ Nothing = return False
rollback = void $ removeKey encr k
enck = maybe id snd enc
- storechunk Nothing storer k content p = storer k content p
- storechunk (Just (cipher, enck)) storer k content p = do
+ storechunk Nothing k content p = storer k content p
+ storechunk (Just (cipher, enck)) k content p = do
cmd <- gpgCmd <$> Annex.getGitConfig
withBytes content $ \b ->
encrypt cmd encr cipher (feedBytes b) $
@@ -251,25 +226,22 @@ specialRemote' cfg c preparestorer prepareretriever prepareremover preparecheckp
storer (enck k) (ByteContent encb) p
-- call retriever to get chunks; decrypt them; stream to dest file
- retrieveKeyFileGen k dest p enc =
- safely $ prepareretriever k $ safely . go
- where
- go (Just retriever) = displayprogress p k Nothing $ \p' ->
+ retrieveKeyFileGen k dest p enc = do
+ displayprogress p k Nothing $ \p' ->
retrieveChunks retriever (uuid baser) chunkconfig
enck k dest p' (sink dest enc encr)
- go Nothing = return False
+ return UnVerified
+ where
enck = maybe id snd enc
- removeKeyGen k enc = safely $ prepareremover k $ safely . go
+ removeKeyGen k enc =
+ removeChunks remover (uuid baser) chunkconfig enck k
where
- go (Just remover) = removeChunks remover (uuid baser) chunkconfig enck k
- go Nothing = return False
enck = maybe id snd enc
- checkPresentGen k enc = preparecheckpresent k go
+ checkPresentGen k enc =
+ checkPresentChunks checkpresent (uuid baser) chunkconfig enck k
where
- go (Just checker) = checkPresentChunks checker (uuid baser) chunkconfig enck k
- go Nothing = cantCheck baser
enck = maybe id snd enc
chunkconfig = chunkConfig cfg
@@ -297,27 +269,25 @@ sink
-> Maybe Handle
-> Maybe MeterUpdate
-> ContentSource
- -> Annex Bool
-sink dest enc c mh mp content = do
- case (enc, mh, content) of
- (Nothing, Nothing, FileContent f)
- | f == dest -> noop
- | otherwise -> liftIO $ moveFile f dest
- (Just (cipher, _), _, ByteContent b) -> do
- cmd <- gpgCmd <$> Annex.getGitConfig
+ -> Annex ()
+sink dest enc c mh mp content = case (enc, mh, content) of
+ (Nothing, Nothing, FileContent f)
+ | f == dest -> noop
+ | otherwise -> liftIO $ moveFile f dest
+ (Just (cipher, _), _, ByteContent b) -> do
+ cmd <- gpgCmd <$> Annex.getGitConfig
+ decrypt cmd c cipher (feedBytes b) $
+ readBytes write
+ (Just (cipher, _), _, FileContent f) -> do
+ cmd <- gpgCmd <$> Annex.getGitConfig
+ withBytes content $ \b ->
decrypt cmd c cipher (feedBytes b) $
readBytes write
- (Just (cipher, _), _, FileContent f) -> do
- cmd <- gpgCmd <$> Annex.getGitConfig
- withBytes content $ \b ->
- decrypt cmd c cipher (feedBytes b) $
- readBytes write
- liftIO $ nukeFile f
- (Nothing, _, FileContent f) -> do
- withBytes content write
- liftIO $ nukeFile f
- (Nothing, _, ByteContent b) -> write b
- return True
+ liftIO $ nukeFile f
+ (Nothing, _, FileContent f) -> do
+ withBytes content write
+ liftIO $ nukeFile f
+ (Nothing, _, ByteContent b) -> write b
where
write b = case mh of
Just h -> liftIO $ b `streamto` h
diff --git a/Remote/Helper/Ssh.hs b/Remote/Helper/Ssh.hs
index 185ad4e..09637b4 100644
--- a/Remote/Helper/Ssh.hs
+++ b/Remote/Helper/Ssh.hs
@@ -106,8 +106,12 @@ inAnnex r k = do
dispatch _ = cantCheck r
{- Removes a key from a remote. -}
-dropKey :: Git.Repo -> Key -> Annex Bool
-dropKey r key = onRemote NoConsumeStdin r (\f p -> liftIO (boolSystem f p), return False) "dropkey"
+dropKey :: Git.Repo -> Key -> Annex ()
+dropKey r key = unlessM (dropKey' r key) $
+ giveup "unable to remove key from remote"
+
+dropKey' :: Git.Repo -> Key -> Annex Bool
+dropKey' r key = onRemote NoConsumeStdin r (\f p -> liftIO (boolSystem f p), return False) "dropkey"
[ Param "--quiet", Param "--force"
, Param $ serializeKey key
]
diff --git a/Remote/Hook.hs b/Remote/Hook.hs
index 71c06de..cc0ead3 100644
--- a/Remote/Hook.hs
+++ b/Remote/Hook.hs
@@ -50,17 +50,17 @@ gen r u rc gc rs = do
c <- parsedRemoteConfig remote rc
cst <- remoteCost gc expensiveRemoteCost
return $ Just $ specialRemote c
- (simplyPrepare $ store hooktype)
- (simplyPrepare $ retrieve hooktype)
- (simplyPrepare $ remove hooktype)
- (simplyPrepare $ checkKey r hooktype)
+ (store hooktype)
+ (retrieve hooktype)
+ (remove hooktype)
+ (checkKey r hooktype)
Remote
{ uuid = u
, cost = cst
, name = Git.repoDescribe r
, storeKey = storeKeyDummy
- , retrieveKeyFile = retreiveKeyFileDummy
- , retrieveKeyFileCheap = retrieveCheap hooktype
+ , retrieveKeyFile = retrieveKeyFileDummy
+ , retrieveKeyFileCheap = Nothing
-- A hook could use http and be vulnerable to
-- redirect to file:// attacks, etc.
, retrievalSecurityPolicy = mkRetrievalVerifiableKeysSecure gc
@@ -133,8 +133,17 @@ lookupHook hookname action = do
hook = annexConfig $ encodeBS' $ hookname ++ "-" ++ action ++ "-hook"
hookfallback = annexConfig $ encodeBS' $ hookname ++ "-hook"
-runHook :: HookName -> Action -> Key -> Maybe FilePath -> Annex Bool -> Annex Bool
-runHook hook action k f a = maybe (return False) run =<< lookupHook hook action
+runHook :: HookName -> Action -> Key -> Maybe FilePath -> Annex ()
+runHook hook action k f = lookupHook hook action >>= \case
+ Just command -> do
+ showOutput -- make way for hook output
+ environ <- liftIO (hookEnv action k f)
+ unlessM (progressCommandEnv "sh" [Param "-c", Param command] environ) $
+ giveup $ hook ++ " hook exited nonzero!"
+ Nothing -> giveup $ action ++ " hook misconfigured"
+
+runHook' :: HookName -> Action -> Key -> Maybe FilePath -> Annex Bool -> Annex Bool
+runHook' hook action k f a = maybe (return False) run =<< lookupHook hook action
where
run command = do
showOutput -- make way for hook output
@@ -146,19 +155,17 @@ runHook hook action k f a = maybe (return False) run =<< lookupHook hook action
)
store :: HookName -> Storer
-store h = fileStorer $ \k src _p ->
- runHook h "store" k (Just src) $ return True
+store h = fileStorer $ \k src _p -> runHook h "store" k (Just src)
retrieve :: HookName -> Retriever
retrieve h = fileRetriever $ \d k _p ->
- unlessM (runHook h "retrieve" k (Just d) $ return True) $
+ unlessM (runHook' h "retrieve" k (Just d) $ return True) $
giveup "failed to retrieve content"
-retrieveCheap :: HookName -> Key -> AssociatedFile -> FilePath -> Annex Bool
-retrieveCheap _ _ _ _ = return False
-
remove :: HookName -> Remover
-remove h k = runHook h "remove" k Nothing $ return True
+remove h k =
+ unlessM (runHook' h "remove" k Nothing $ return True) $
+ giveup "failed to remove content"
checkKey :: Git.Repo -> HookName -> CheckPresent
checkKey r h k = do
diff --git a/Remote/P2P.hs b/Remote/P2P.hs
index 009bca6..5016c9f 100644
--- a/Remote/P2P.hs
+++ b/Remote/P2P.hs
@@ -56,7 +56,7 @@ chainGen addr r u rc gc rs = do
, name = Git.repoDescribe r
, storeKey = store (const protorunner)
, retrieveKeyFile = retrieve (const protorunner)
- , retrieveKeyFileCheap = \_ _ _ -> return False
+ , retrieveKeyFileCheap = Nothing
, retrievalSecurityPolicy = RetrievalAllKeysSecure
, removeKey = remove protorunner
, lockContent = Just $ lock withconn runProtoConn u
diff --git a/Remote/Rsync.hs b/Remote/Rsync.hs
index 04d01e6..d8a9c37 100644
--- a/Remote/Rsync.hs
+++ b/Remote/Rsync.hs
@@ -80,17 +80,17 @@ gen r u rc gc rs = do
-- Rsync displays its own progress.
{ displayProgress = False }
return $ Just $ specialRemote' specialcfg c
- (simplyPrepare $ fileStorer $ store o)
- (simplyPrepare $ fileRetriever $ retrieve o)
- (simplyPrepare $ remove o)
- (simplyPrepare $ checkKey r o)
+ (fileStorer $ store o)
+ (fileRetriever $ retrieve o)
+ (remove o)
+ (checkKey r o)
Remote
{ uuid = u
, cost = cst
, name = Git.repoDescribe r
, storeKey = storeKeyDummy
- , retrieveKeyFile = retreiveKeyFileDummy
- , retrieveKeyFileCheap = retrieveCheap o
+ , retrieveKeyFile = retrieveKeyFileDummy
+ , retrieveKeyFileCheap = Just (retrieveCheap o)
, retrievalSecurityPolicy = RetrievalAllKeysSecure
, removeKey = removeKeyDummy
, lockContent = Nothing
@@ -201,7 +201,7 @@ rsyncSetup _ mu _ c gc = do
- (When we have the right hash directory structure, we can just
- pass --include=X --include=X/Y --include=X/Y/file --exclude=*)
-}
-store :: RsyncOpts -> Key -> FilePath -> MeterUpdate -> Annex Bool
+store :: RsyncOpts -> Key -> FilePath -> MeterUpdate -> Annex ()
store o k src meterupdate = storeGeneric o meterupdate basedest populatedest
where
basedest = fromRawFilePath $ Prelude.head (keyPaths k)
@@ -216,8 +216,13 @@ store o k src meterupdate = storeGeneric o meterupdate basedest populatedest
- object file, and has to be copied or hard linked into place. -}
canrename = isEncKey k || isChunkKey k
-storeGeneric :: RsyncOpts -> MeterUpdate -> FilePath -> (FilePath -> Annex Bool) -> Annex Bool
-storeGeneric o meterupdate basedest populatedest = withRsyncScratchDir $ \tmp -> do
+storeGeneric :: RsyncOpts -> MeterUpdate -> FilePath -> (FilePath -> Annex Bool) -> Annex ()
+storeGeneric o meterupdate basedest populatedest =
+ unlessM (storeGeneric' o meterupdate basedest populatedest) $
+ giveup "failed to rsync content"
+
+storeGeneric' :: RsyncOpts -> MeterUpdate -> FilePath -> (FilePath -> Annex Bool) -> Annex Bool
+storeGeneric' o meterupdate basedest populatedest = withRsyncScratchDir $ \tmp -> do
let dest = tmp </> basedest
createAnnexDirectory (parentDir dest)
ok <- populatedest dest
@@ -232,12 +237,13 @@ storeGeneric o meterupdate basedest populatedest = withRsyncScratchDir $ \tmp ->
else return False
retrieve :: RsyncOpts -> FilePath -> Key -> MeterUpdate -> Annex ()
-retrieve o f k p =
- unlessM (rsyncRetrieveKey o k f (Just p)) $
- giveup "rsync failed"
+retrieve o f k p = rsyncRetrieveKey o k f (Just p)
-retrieveCheap :: RsyncOpts -> Key -> AssociatedFile -> FilePath -> Annex Bool
-retrieveCheap o k _af f = ifM (preseedTmp k f) ( rsyncRetrieveKey o k f Nothing , return False )
+retrieveCheap :: RsyncOpts -> Key -> AssociatedFile -> FilePath -> Annex ()
+retrieveCheap o k _af f = ifM (preseedTmp k f)
+ ( rsyncRetrieveKey o k f Nothing
+ , giveup "cannot preseed rsync with existing content"
+ )
remove :: RsyncOpts -> Remover
remove o k = removeGeneric o includes
@@ -254,11 +260,11 @@ remove o k = removeGeneric o includes
- except for the specified includes. Due to the way rsync traverses
- directories, the includes must match both the file to be deleted, and
- its parent directories, but not their other contents. -}
-removeGeneric :: RsyncOpts -> [String] -> Annex Bool
+removeGeneric :: RsyncOpts -> [String] -> Annex ()
removeGeneric o includes = do
ps <- sendParams
opts <- rsyncOptions o
- withRsyncScratchDir $ \tmp -> liftIO $ do
+ ok <- withRsyncScratchDir $ \tmp -> liftIO $ do
{- Send an empty directory to rysnc to make it delete. -}
rsync $ opts ++ ps ++
map (\s -> Param $ "--include=" ++ s) includes ++
@@ -268,6 +274,8 @@ removeGeneric o includes = do
[ Param $ addTrailingPathSeparator tmp
, Param $ rsyncUrl o
]
+ unless ok $
+ giveup "rsync failed"
checkKey :: Git.Repo -> RsyncOpts -> CheckPresent
checkKey r o k = do
@@ -285,14 +293,14 @@ checkPresentGeneric o rsyncurls = do
proc "rsync" $ toCommand $ opts ++ [Param u]
return True
-storeExportM :: RsyncOpts -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex Bool
+storeExportM :: RsyncOpts -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
storeExportM o src _k loc meterupdate =
storeGeneric o meterupdate basedest populatedest
where
basedest = fromRawFilePath (fromExportLocation loc)
populatedest = liftIO . createLinkOrCopy src
-retrieveExportM :: RsyncOpts -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Bool
+retrieveExportM :: RsyncOpts -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex ()
retrieveExportM o _k loc dest p = rsyncRetrieve o [rsyncurl] dest (Just p)
where
rsyncurl = mkRsyncUrl o (fromRawFilePath (fromExportLocation loc))
@@ -302,7 +310,7 @@ checkPresentExportM o _k loc = checkPresentGeneric o [rsyncurl]
where
rsyncurl = mkRsyncUrl o (fromRawFilePath (fromExportLocation loc))
-removeExportM :: RsyncOpts -> Key -> ExportLocation -> Annex Bool
+removeExportM :: RsyncOpts -> Key -> ExportLocation -> Annex ()
removeExportM o _k loc =
removeGeneric o $ includes $ fromRawFilePath $ fromExportLocation loc
where
@@ -310,7 +318,7 @@ removeExportM o _k loc =
Nothing -> []
Just f' -> includes f'
-removeExportDirectoryM :: RsyncOpts -> ExportDirectory -> Annex Bool
+removeExportDirectoryM :: RsyncOpts -> ExportDirectory -> Annex ()
removeExportDirectoryM o ed = removeGeneric o (allbelow d : includes d)
where
d = fromRawFilePath $ fromExportDirectory ed
@@ -319,7 +327,7 @@ removeExportDirectoryM o ed = removeGeneric o (allbelow d : includes d)
Nothing -> []
Just f' -> includes f'
-renameExportM :: RsyncOpts -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe Bool)
+renameExportM :: RsyncOpts -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe ())
renameExportM _ _ _ _ = return Nothing
{- Rsync params to enable resumes of sending files safely,
@@ -344,17 +352,21 @@ withRsyncScratchDir a = do
t <- fromRepo gitAnnexTmpObjectDir
withTmpDirIn t "rsynctmp" a
-rsyncRetrieve :: RsyncOpts -> [RsyncUrl] -> FilePath -> Maybe MeterUpdate -> Annex Bool
-rsyncRetrieve o rsyncurls dest meterupdate =
- showResumable $ untilTrue rsyncurls $ \u -> rsyncRemote Download o meterupdate
+rsyncRetrieve :: RsyncOpts -> [RsyncUrl] -> FilePath -> Maybe MeterUpdate -> Annex ()
+rsyncRetrieve o rsyncurls dest meterupdate =
+ unlessM go $
+ giveup "rsync failed"
+ where
+ go = showResumable $ untilTrue rsyncurls $ \u -> rsyncRemote Download o meterupdate
-- use inplace when retrieving to support resuming
[ Param "--inplace"
, Param u
, File dest
]
-rsyncRetrieveKey :: RsyncOpts -> Key -> FilePath -> Maybe MeterUpdate -> Annex Bool
-rsyncRetrieveKey o k dest meterupdate = rsyncRetrieve o (rsyncUrls o k) dest meterupdate
+rsyncRetrieveKey :: RsyncOpts -> Key -> FilePath -> Maybe MeterUpdate -> Annex ()
+rsyncRetrieveKey o k dest meterupdate =
+ rsyncRetrieve o (rsyncUrls o k) dest meterupdate
showResumable :: Annex Bool -> Annex Bool
showResumable a = ifM a
diff --git a/Remote/S3.hs b/Remote/S3.hs
index cb345d1..9bcd793 100644
--- a/Remote/S3.hs
+++ b/Remote/S3.hs
@@ -99,6 +99,8 @@ remote = specialRemoteType $ RemoteType
(FieldDesc "port to connect to")
, optionalStringParser requeststyleField
(FieldDesc "for path-style requests, set to \"path\"")
+ , signatureVersionParser signatureField
+ (FieldDesc "S3 signature version")
, optionalStringParser mungekeysField HiddenField
, optionalStringParser AWS.s3credsField HiddenField
]
@@ -148,6 +150,22 @@ protocolField = Accepted "protocol"
requeststyleField :: RemoteConfigField
requeststyleField = Accepted "requeststyle"
+signatureField :: RemoteConfigField
+signatureField = Accepted "signature"
+
+newtype SignatureVersion = SignatureVersion Int
+
+signatureVersionParser :: RemoteConfigField -> FieldDesc -> RemoteConfigFieldParser
+signatureVersionParser f fd =
+ genParser go f defver fd
+ (Just (ValueDesc "v2 or v4"))
+ where
+ go "v2" = Just (SignatureVersion 2)
+ go "v4" = Just (SignatureVersion 4)
+ go _ = Nothing
+
+ defver = SignatureVersion 2
+
portField :: RemoteConfigField
portField = Accepted "port"
@@ -164,10 +182,10 @@ gen r u rc gc rs = do
return $ new c cst info hdl magic
where
new c cst info hdl magic = Just $ specialRemote c
- (simplyPrepare $ store hdl this info magic)
- (simplyPrepare $ retrieve hdl this rs c info)
- (simplyPrepare $ remove hdl this info)
- (simplyPrepare $ checkKey hdl this rs c info)
+ (store hdl this info magic)
+ (retrieve hdl this rs c info)
+ (remove hdl this info)
+ (checkKey hdl this rs c info)
this
where
this = Remote
@@ -175,8 +193,8 @@ gen r u rc gc rs = do
, cost = cst
, name = Git.repoDescribe r
, storeKey = storeKeyDummy
- , retrieveKeyFile = retreiveKeyFileDummy
- , retrieveKeyFileCheap = retrieveCheap
+ , retrieveKeyFile = retrieveKeyFileDummy
+ , retrieveKeyFileCheap = Nothing
-- HttpManagerRestricted is used here, so this is
-- secure.
, retrievalSecurityPolicy = RetrievalAllKeysSecure
@@ -225,7 +243,7 @@ s3Setup ss mu mcreds c gc = do
s3Setup' ss u mcreds c gc
s3Setup' :: SetupStage -> UUID -> Maybe CredPair -> RemoteConfig -> RemoteGitConfig -> Annex (RemoteConfig, UUID)
-s3Setup' ss u mcreds c gc
+s3Setup' ss u mcreds c gc
| maybe False (isIAHost . fromProposedAccepted) (M.lookup hostField c) = archiveorg
| otherwise = defaulthost
where
@@ -245,21 +263,24 @@ s3Setup' ss u mcreds c gc
return (fullconfig, u)
defaulthost = do
- (c', encsetup) <- encryptionSetup c gc
- c'' <- setRemoteCredPair encsetup c' gc (AWS.creds u) mcreds
- let fullconfig = c'' `M.union` defaults
- pc <- either giveup return . parseRemoteConfig fullconfig
- =<< configParser remote fullconfig
- info <- extractS3Info pc
- checkexportimportsafe pc info
+ (c', encsetup) <- encryptionSetup (c `M.union` defaults) gc
+ pc <- either giveup return . parseRemoteConfig c'
+ =<< configParser remote c'
+ c'' <- setRemoteCredPair encsetup pc gc (AWS.creds u) mcreds
+ pc' <- either giveup return . parseRemoteConfig c''
+ =<< configParser remote c''
+ info <- extractS3Info pc'
+ checkexportimportsafe pc' info
case ss of
- Init -> genBucket pc gc u
+ Init -> genBucket pc' gc u
_ -> return ()
- use fullconfig pc info
+ use c'' pc' info
archiveorg = do
showNote "Internet Archive mode"
- c' <- setRemoteCredPair noEncryptionUsed c gc (AWS.creds u) mcreds
+ pc <- either giveup return . parseRemoteConfig c
+ =<< configParser remote c
+ c' <- setRemoteCredPair noEncryptionUsed pc gc (AWS.creds u) mcreds
-- Ensure user enters a valid bucket name, since
-- this determines the name of the archive.org item.
let validbucket = replace " " "-" $ map toLower $
@@ -274,14 +295,14 @@ s3Setup' ss u mcreds c gc
M.union c' $
-- special constraints on key names
M.insert mungekeysField (Proposed "ia") defaults
- pc <- either giveup return . parseRemoteConfig archiveconfig
+ pc' <- either giveup return . parseRemoteConfig archiveconfig
=<< configParser remote archiveconfig
- info <- extractS3Info pc
- checkexportimportsafe pc info
- hdl <- mkS3HandleVar pc gc u
+ info <- extractS3Info pc'
+ checkexportimportsafe pc' info
+ hdl <- mkS3HandleVar pc' gc u
withS3HandleOrFail u hdl $
- writeUUIDFile pc u info
- use archiveconfig pc info
+ writeUUIDFile pc' u info
+ use archiveconfig pc' info
checkexportimportsafe c' info =
unlessM (Annex.getState Annex.force) $
@@ -304,7 +325,6 @@ store mh r info magic = fileStorer $ \k f p -> withS3HandleOrFail (uuid r) mh $
-- Store public URL to item in Internet Archive.
when (isIA info && not (isChunkKey k)) $
setUrlPresent k (iaPublicUrl info (bucketObject info k))
- return True
storeHelper :: S3Info -> S3Handle -> Maybe Magic -> FilePath -> S3.Object -> MeterUpdate -> Annex (Maybe S3Etag, Maybe S3VersionID)
storeHelper info h magic f object p = liftIO $ case partSize info of
@@ -401,14 +421,11 @@ retrieveHelper' h f p req = liftIO $ runResourceT $ do
S3.GetObjectResponse { S3.gorResponse = rsp } <- sendS3Handle h req
Url.sinkResponseFile p zeroBytesProcessed f WriteMode rsp
-retrieveCheap :: Key -> AssociatedFile -> FilePath -> Annex Bool
-retrieveCheap _ _ _ = return False
-
remove :: S3HandleVar -> Remote -> S3Info -> Remover
-remove hv r info k = withS3HandleOrFail (uuid r) hv $ \h -> liftIO $ runResourceT $ do
- res <- tryNonAsync $ sendS3Handle h $
+remove hv r info k = withS3HandleOrFail (uuid r) hv $ \h -> do
+ S3.DeleteObjectResponse <- liftIO $ runResourceT $ sendS3Handle h $
S3.DeleteObject (T.pack $ bucketObject info k) (bucket info)
- return $ either (const False) (const True) res
+ return ()
checkKey :: S3HandleVar -> Remote -> RemoteStateHandle -> ParsedRemoteConfig -> S3Info -> CheckPresent
checkKey hv r rs c info k = withS3Handle hv $ \case
@@ -446,51 +463,42 @@ checkKeyHelper' info h o limit = liftIO $ runResourceT $ do
where
req = limit $ S3.headObject (bucket info) o
-storeExportS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex Bool
-storeExportS3 hv r rs info magic f k loc p = fst
- <$> storeExportS3' hv r rs info magic f k loc p
+storeExportS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
+storeExportS3 hv r rs info magic f k loc p = void $ storeExportS3' hv r rs info magic f k loc p
-storeExportS3' :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex (Bool, (Maybe S3Etag, Maybe S3VersionID))
+storeExportS3' :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex (Maybe S3Etag, Maybe S3VersionID)
storeExportS3' hv r rs info magic f k loc p = withS3Handle hv $ \case
- Just h -> catchNonAsync (go h) (\e -> warning (show e) >> return (False, (Nothing, Nothing)))
- Nothing -> do
- warning $ needS3Creds (uuid r)
- return (False, (Nothing, Nothing))
+ Just h -> go h
+ Nothing -> giveup $ needS3Creds (uuid r)
where
go h = do
let o = T.pack $ bucketExportLocation info loc
(metag, mvid) <- storeHelper info h magic f o p
setS3VersionID info rs k mvid
- return (True, (metag, mvid))
+ return (metag, mvid)
-retrieveExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Bool
-retrieveExportS3 hv r info _k loc f p =
- catchNonAsync go (\e -> warning (show e) >> return False)
- where
- go = withS3Handle hv $ \case
- Just h -> do
- retrieveHelper info h (Left (T.pack exportloc)) f p
- return True
+retrieveExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex ()
+retrieveExportS3 hv r info _k loc f p = do
+ withS3Handle hv $ \case
+ Just h -> retrieveHelper info h (Left (T.pack exportloc)) f p
Nothing -> case getPublicUrlMaker info of
- Nothing -> do
- warning $ needS3Creds (uuid r)
- return False
- Just geturl -> Url.withUrlOptions $
- Url.download p (geturl exportloc) f
+ Just geturl -> either giveup return =<<
+ Url.withUrlOptions
+ (Url.download' p (geturl exportloc) f)
+ Nothing -> giveup $ needS3Creds (uuid r)
+ where
exportloc = bucketExportLocation info loc
-removeExportS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Key -> ExportLocation -> Annex Bool
+removeExportS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Key -> ExportLocation -> Annex ()
removeExportS3 hv r rs info k loc = withS3Handle hv $ \case
- Just h -> checkVersioning info rs k $
- catchNonAsync (go h) (\e -> warning (show e) >> return False)
- Nothing -> do
- warning $ needS3Creds (uuid r)
- return False
+ Just h -> do
+ checkVersioning info rs k
+ liftIO $ runResourceT $ do
+ S3.DeleteObjectResponse <- sendS3Handle h $
+ S3.DeleteObject (T.pack $ bucketExportLocation info loc) (bucket info)
+ return ()
+ Nothing -> giveup $ needS3Creds (uuid r)
where
- go h = liftIO $ runResourceT $ do
- res <- tryNonAsync $ sendS3Handle h $
- S3.DeleteObject (T.pack $ bucketExportLocation info loc) (bucket info)
- return $ either (const False) (const True) res
checkPresentExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> Annex Bool
checkPresentExportS3 hv r info k loc = withS3Handle hv $ \case
@@ -503,15 +511,14 @@ checkPresentExportS3 hv r info k loc = withS3Handle hv $ \case
giveup "No S3 credentials configured"
-- S3 has no move primitive; copy and delete.
-renameExportS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe Bool)
+renameExportS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe ())
renameExportS3 hv r rs info k src dest = Just <$> go
where
go = withS3Handle hv $ \case
- Just h -> checkVersioning info rs k $
- catchNonAsync (go' h) (\_ -> return False)
- Nothing -> do
- warning $ needS3Creds (uuid r)
- return False
+ Just h -> do
+ checkVersioning info rs k
+ go' h
+ Nothing -> giveup $ needS3Creds (uuid r)
go' h = liftIO $ runResourceT $ do
let co = S3.copyObject (bucket info) dstobject
@@ -520,7 +527,6 @@ renameExportS3 hv r rs info k src dest = Just <$> go
-- ACL is not preserved by copy.
void $ sendS3Handle h $ co { S3.coAcl = acl info }
void $ sendS3Handle h $ S3.DeleteObject srcobject (bucket info)
- return True
srcobject = T.pack $ bucketExportLocation info src
dstobject = T.pack $ bucketExportLocation info dest
@@ -623,21 +629,18 @@ mkImportableContentsVersioned info = build . groupfiles
| otherwise =
i : removemostrecent mtime rest
-retrieveExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> ExportLocation -> ContentIdentifier -> FilePath -> Annex (Maybe Key) -> MeterUpdate -> Annex (Maybe Key)
+retrieveExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> ExportLocation -> ContentIdentifier -> FilePath -> Annex Key -> MeterUpdate -> Annex Key
retrieveExportWithContentIdentifierS3 hv r rs info loc cid dest mkkey p = withS3Handle hv $ \case
- Nothing -> do
- warning $ needS3Creds (uuid r)
- return Nothing
- Just h -> flip catchNonAsync (\e -> warning (show e) >> return Nothing) $ do
+ Just h -> do
rewritePreconditionException $ retrieveHelper' h dest p $
limitGetToContentIdentifier cid $
S3.getObject (bucket info) o
- mk <- mkkey
- case (mk, extractContentIdentifier cid o) of
- (Just k, Right vid) ->
- setS3VersionID info rs k vid
- _ -> noop
- return mk
+ k <- mkkey
+ case extractContentIdentifier cid o of
+ Right vid -> setS3VersionID info rs k vid
+ Left _ -> noop
+ return k
+ Nothing -> giveup $ needS3Creds (uuid r)
where
o = T.pack $ bucketExportLocation info loc
@@ -657,7 +660,7 @@ rewritePreconditionException a = catchJust (Url.matchStatusCodeException want) a
--
-- When the bucket is not versioned, data loss can result.
-- This is why that configuration requires --force to enable.
-storeExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex (Either String ContentIdentifier)
+storeExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
storeExportWithContentIdentifierS3 hv r rs info magic src k loc _overwritablecids p
| versioning info = go
-- FIXME Actual aws version that supports getting Etag for a store
@@ -666,18 +669,16 @@ storeExportWithContentIdentifierS3 hv r rs info magic src k loc _overwritablecid
#if MIN_VERSION_aws(0,99,0)
| otherwise = go
#else
- | otherwise = return $
- Left "git-annex is built with too old a version of the aws library to support this operation"
+ | otherwise = giveup "git-annex is built with too old a version of the aws library to support this operation"
#endif
where
go = storeExportS3' hv r rs info magic src k loc p >>= \case
- (False, _) -> return $ Left "failed to store content in S3 bucket"
- (True, (_, Just vid)) -> return $ Right $
+ (_, Just vid) -> return $
mkS3VersionedContentIdentifier vid
- (True, (Just etag, Nothing)) -> return $ Right $
+ (Just etag, Nothing) -> return $
mkS3UnversionedContentIdentifier etag
- (True, (Nothing, Nothing)) ->
- return $ Left "did not get ETag for store to S3 bucket"
+ (Nothing, Nothing) ->
+ giveup "did not get ETag for store to S3 bucket"
-- Does not guarantee that the removed object has the content identifier,
-- but when the bucket is versioned, the removed object content can still
@@ -685,7 +686,7 @@ storeExportWithContentIdentifierS3 hv r rs info magic src k loc _overwritablecid
--
-- When the bucket is not versioned, data loss can result.
-- This is why that configuration requires --force to enable.
-removeExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Key -> ExportLocation -> [ContentIdentifier] -> Annex Bool
+removeExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Key -> ExportLocation -> [ContentIdentifier] -> Annex ()
removeExportWithContentIdentifierS3 hv r rs info k loc _removeablecids =
removeExportS3 hv r rs info k loc
@@ -877,7 +878,10 @@ s3Configuration c = cfg
Nothing
| port == 443 -> AWS.HTTPS
| otherwise -> AWS.HTTP
- cfg = S3.s3 proto endpoint False
+ cfg = case getRemoteConfigValue signatureField c of
+ Just (SignatureVersion 4) ->
+ S3.s3v4 proto endpoint False S3.SignWithEffort
+ _ -> S3.s3 proto endpoint False
data S3Info = S3Info
{ bucket :: S3.Bucket
@@ -1282,12 +1286,9 @@ enableBucketVersioning ss info _ _ _ = do
-- were created without versioning, some unversioned files exported to
-- them, and then versioning enabled, and this is to avoid data loss in
-- those cases.
-checkVersioning :: S3Info -> RemoteStateHandle -> Key -> Annex Bool -> Annex Bool
-checkVersioning info rs k a
+checkVersioning :: S3Info -> RemoteStateHandle -> Key -> Annex ()
+checkVersioning info rs k
| versioning info = getS3VersionID rs k >>= \case
- [] -> do
- warning $ "Remote is configured to use versioning, but no S3 version ID is recorded for this key, so it cannot safely be modified."
- return False
- _ -> a
- | otherwise = a
-
+ [] -> giveup "Remote is configured to use versioning, but no S3 version ID is recorded for this key, so it cannot safely be modified."
+ _ -> return ()
+ | otherwise = return ()
diff --git a/Remote/Tahoe.hs b/Remote/Tahoe.hs
index 6521cd7..693eaaf 100644
--- a/Remote/Tahoe.hs
+++ b/Remote/Tahoe.hs
@@ -87,7 +87,7 @@ gen r u rc gc rs = do
, name = Git.repoDescribe r
, storeKey = store rs hdl
, retrieveKeyFile = retrieve rs hdl
- , retrieveKeyFileCheap = \_ _ _ -> return False
+ , retrieveKeyFileCheap = Nothing
-- Tahoe cryptographically verifies content.
, retrievalSecurityPolicy = RetrievalAllKeysSecure
, removeKey = remove
@@ -135,22 +135,23 @@ tahoeSetup _ mu _ c _ = do
where
missingfurl = giveup "Set TAHOE_FURL to the introducer furl to use."
-store :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
+store :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> MeterUpdate -> Annex ()
store rs hdl k _f _p = sendAnnex k noop $ \src ->
parsePut <$> liftIO (readTahoe hdl "put" [File src]) >>= maybe
- (return False)
- (\cap -> storeCapability rs k cap >> return True)
+ (giveup "tahoe failed to store content")
+ (\cap -> storeCapability rs k cap)
-retrieve :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
-retrieve rs hdl k _f d _p = unVerified $ go =<< getCapability rs k
+retrieve :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex Verification
+retrieve rs hdl k _f d _p = do
+ go =<< getCapability rs k
+ return UnVerified
where
- go Nothing = return False
- go (Just cap) = liftIO $ requestTahoe hdl "get" [Param cap, File d]
+ go Nothing = giveup "tahoe capability is not known"
+ go (Just cap) = unlessM (liftIO $ requestTahoe hdl "get" [Param cap, File d]) $
+ giveup "tahoe failed to reteieve content"
-remove :: Key -> Annex Bool
-remove _k = do
- warning "content cannot be removed from tahoe remote"
- return False
+remove :: Key -> Annex ()
+remove _k = giveup "content cannot be removed from tahoe remote"
checkKey :: RemoteStateHandle -> TahoeHandle -> Key -> Annex Bool
checkKey rs hdl k = go =<< getCapability rs k
diff --git a/Remote/Web.hs b/Remote/Web.hs
index 603306b..984ce58 100644
--- a/Remote/Web.hs
+++ b/Remote/Web.hs
@@ -52,7 +52,7 @@ gen r _ rc gc rs = do
, name = Git.repoDescribe r
, storeKey = uploadKey
, retrieveKeyFile = downloadKey
- , retrieveKeyFileCheap = downloadKeyCheap
+ , retrieveKeyFileCheap = Nothing
-- HttpManagerRestricted is used here, so this is
-- secure.
, retrievalSecurityPolicy = RetrievalAllKeysSecure
@@ -80,32 +80,28 @@ gen r _ rc gc rs = do
, remoteStateHandle = rs
}
-downloadKey :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
-downloadKey key _af dest p = unVerified $ get =<< getWebUrls key
+downloadKey :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex Verification
+downloadKey key _af dest p = do
+ get =<< getWebUrls key
+ return UnVerified
where
- get [] = do
- warning "no known url"
- return False
- get urls = untilTrue urls $ \u -> do
- let (u', downloader) = getDownloader u
- case downloader of
- YoutubeDownloader -> do
- showOutput
- youtubeDlTo key u' dest
- _ -> Url.withUrlOptions $ downloadUrl key p [u'] dest
+ get [] = giveup "no known url"
+ get urls = do
+ r <- untilTrue urls $ \u -> do
+ let (u', downloader) = getDownloader u
+ case downloader of
+ YoutubeDownloader -> do
+ showOutput
+ youtubeDlTo key u' dest
+ _ -> Url.withUrlOptions $ downloadUrl key p [u'] dest
+ unless r $
+ giveup "download failed"
-downloadKeyCheap :: Key -> AssociatedFile -> FilePath -> Annex Bool
-downloadKeyCheap _ _ _ = return False
+uploadKey :: Key -> AssociatedFile -> MeterUpdate -> Annex ()
+uploadKey _ _ _ = giveup "upload to web not supported"
-uploadKey :: Key -> AssociatedFile -> MeterUpdate -> Annex Bool
-uploadKey _ _ _ = do
- warning "upload to web not supported"
- return False
-
-dropKey :: Key -> Annex Bool
-dropKey k = do
- mapM_ (setUrlMissing k) =<< getWebUrls k
- return True
+dropKey :: Key -> Annex ()
+dropKey k = mapM_ (setUrlMissing k) =<< getWebUrls k
checkKey :: Key -> Annex Bool
checkKey key = do
diff --git a/Remote/WebDAV.hs b/Remote/WebDAV.hs
index 59b843d..018987d 100644
--- a/Remote/WebDAV.hs
+++ b/Remote/WebDAV.hs
@@ -73,10 +73,10 @@ gen r u rc gc rs = do
<*> mkDavHandleVar c gc u
where
new c cst hdl = Just $ specialRemote c
- (simplyPrepare $ store hdl chunkconfig)
- (simplyPrepare $ retrieve hdl chunkconfig)
- (simplyPrepare $ remove hdl)
- (simplyPrepare $ checkKey hdl this chunkconfig)
+ (store hdl chunkconfig)
+ (retrieve hdl chunkconfig)
+ (remove hdl)
+ (checkKey hdl this chunkconfig)
this
where
this = Remote
@@ -84,8 +84,8 @@ gen r u rc gc rs = do
, cost = cst
, name = Git.repoDescribe r
, storeKey = storeKeyDummy
- , retrieveKeyFile = retreiveKeyFileDummy
- , retrieveKeyFileCheap = retrieveCheap
+ , retrieveKeyFile = retrieveKeyFileDummy
+ , retrieveKeyFileCheap = Nothing
-- HttpManagerRestricted is used here, so this is
-- secure.
, retrievalSecurityPolicy = RetrievalAllKeysSecure
@@ -134,23 +134,18 @@ webdavSetup _ mu mcreds c gc = do
creds <- maybe (getCreds pc gc u) (return . Just) mcreds
testDav url creds
gitConfigSpecialRemote u c' [("webdav", "true")]
- c'' <- setRemoteCredPair encsetup c' gc (davCreds u) creds
+ c'' <- setRemoteCredPair encsetup pc gc (davCreds u) creds
return (c'', u)
store :: DavHandleVar -> ChunkConfig -> Storer
store hv (LegacyChunks chunksize) = fileStorer $ \k f p ->
- withDavHandle hv $ \case
- Nothing -> return False
- Just dav -> liftIO $
- withMeteredFile f p $ storeLegacyChunked chunksize k dav
+ withDavHandle hv $ \dav -> liftIO $
+ withMeteredFile f p $ storeLegacyChunked chunksize k dav
store hv _ = httpStorer $ \k reqbody ->
- withDavHandle hv $ \case
- Nothing -> return False
- Just dav -> liftIO $ goDAV dav $ do
- let tmp = keyTmpLocation k
- let dest = keyLocation k
- storeHelper dav tmp dest reqbody
- return True
+ withDavHandle hv $ \dav -> liftIO $ goDAV dav $ do
+ let tmp = keyTmpLocation k
+ let dest = keyLocation k
+ storeHelper dav tmp dest reqbody
storeHelper :: DavHandle -> DavLocation -> DavLocation -> RequestBody -> DAVT IO ()
storeHelper dav tmp dest reqbody = do
@@ -167,17 +162,12 @@ finalizeStore dav tmp dest = do
maybe noop (void . mkColRecursive) (locationParent dest)
moveDAV (baseURL dav) tmp dest
-retrieveCheap :: Key -> AssociatedFile -> FilePath -> Annex Bool
-retrieveCheap _ _ _ = return False
-
retrieve :: DavHandleVar -> ChunkConfig -> Retriever
retrieve hv cc = fileRetriever $ \d k p ->
- withDavHandle hv $ \case
- Nothing -> giveup "unable to connect"
- Just dav -> case cc of
- LegacyChunks _ -> retrieveLegacyChunked d k p dav
- _ -> liftIO $
- goDAV dav $ retrieveHelper (keyLocation k) d p
+ withDavHandle hv $ \dav -> case cc of
+ LegacyChunks _ -> retrieveLegacyChunked d k p dav
+ _ -> liftIO $
+ goDAV dav $ retrieveHelper (keyLocation k) d p
retrieveHelper :: DavLocation -> FilePath -> MeterUpdate -> DAVT IO ()
retrieveHelper loc d p = do
@@ -186,100 +176,86 @@ retrieveHelper loc d p = do
withContentM $ httpBodyRetriever d p
remove :: DavHandleVar -> Remover
-remove hv k = withDavHandle hv $ \case
- Nothing -> return False
- Just dav -> liftIO $ goDAV dav $
- -- Delete the key's whole directory, including any
- -- legacy chunked files, etc, in a single action.
- removeHelper (keyDir k)
-
-removeHelper :: DavLocation -> DAVT IO Bool
+remove hv k = withDavHandle hv $ \dav -> liftIO $ goDAV dav $
+ -- Delete the key's whole directory, including any
+ -- legacy chunked files, etc, in a single action.
+ removeHelper (keyDir k)
+
+removeHelper :: DavLocation -> DAVT IO ()
removeHelper d = do
debugDav $ "delContent " ++ d
v <- safely $ inLocation d delContentM
case v of
- Just _ -> return True
+ Just _ -> return ()
Nothing -> do
v' <- existsDAV d
case v' of
- Right False -> return True
- _ -> return False
+ Right False -> return ()
+ _ -> giveup "failed to remove content from remote"
checkKey :: DavHandleVar -> Remote -> ChunkConfig -> CheckPresent
-checkKey hv r chunkconfig k = withDavHandle hv $ \case
- Nothing -> giveup $ name r ++ " not configured"
- Just dav -> do
- showChecking r
- case chunkconfig of
- LegacyChunks _ -> checkKeyLegacyChunked dav k
- _ -> do
- v <- liftIO $ goDAV dav $
- existsDAV (keyLocation k)
- either giveup return v
-
-storeExportDav :: DavHandleVar -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex Bool
+checkKey hv r chunkconfig k = withDavHandle hv $ \dav -> do
+ showChecking r
+ case chunkconfig of
+ LegacyChunks _ -> checkKeyLegacyChunked dav k
+ _ -> do
+ v <- liftIO $ goDAV dav $
+ existsDAV (keyLocation k)
+ either giveup return v
+
+storeExportDav :: DavHandleVar -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
storeExportDav hdl f k loc p = case exportLocation loc of
- Right dest -> withDavHandle hdl $ \mh -> runExport mh $ \dav -> do
+ Right dest -> withDavHandle hdl $ \h -> runExport h $ \dav -> do
reqbody <- liftIO $ httpBodyStorer f p
storeHelper dav (keyTmpLocation k) dest reqbody
- return True
- Left err -> do
- warning err
- return False
+ Left err -> giveup err
-retrieveExportDav :: DavHandleVar -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Bool
+retrieveExportDav :: DavHandleVar -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex ()
retrieveExportDav hdl _k loc d p = case exportLocation loc of
- Right src -> withDavHandle hdl $ \mh -> runExport mh $ \_dav -> do
+ Right src -> withDavHandle hdl $ \h -> runExport h $ \_dav ->
retrieveHelper src d p
- return True
- Left _err -> return False
+ Left err -> giveup err
checkPresentExportDav :: DavHandleVar -> Remote -> Key -> ExportLocation -> Annex Bool
-checkPresentExportDav hdl r _k loc = case exportLocation loc of
- Right p -> withDavHandle hdl $ \case
- Nothing -> giveup $ name r ++ " not configured"
- Just h -> liftIO $ do
- v <- goDAV h $ existsDAV p
- either giveup return v
+checkPresentExportDav hdl _ _k loc = case exportLocation loc of
+ Right p -> withDavHandle hdl $ \h -> liftIO $ do
+ v <- goDAV h $ existsDAV p
+ either giveup return v
Left err -> giveup err
-removeExportDav :: DavHandleVar-> Key -> ExportLocation -> Annex Bool
+removeExportDav :: DavHandleVar-> Key -> ExportLocation -> Annex ()
removeExportDav hdl _k loc = case exportLocation loc of
- Right p -> withDavHandle hdl $ \mh -> runExport mh $ \_dav ->
+ Right p -> withDavHandle hdl $ \h -> runExport h $ \_dav ->
removeHelper p
-- When the exportLocation is not legal for webdav,
-- the content is certainly not stored there, so it's ok for
-- removal to succeed. This allows recovery after failure to store
-- content there, as the user can rename the problem file and
-- this will be called to make sure it's gone.
- Left _err -> return True
+ Left _err -> return ()
-removeExportDirectoryDav :: DavHandleVar -> ExportDirectory -> Annex Bool
-removeExportDirectoryDav hdl dir = withDavHandle hdl $ \mh -> runExport mh $ \_dav -> do
+removeExportDirectoryDav :: DavHandleVar -> ExportDirectory -> Annex ()
+removeExportDirectoryDav hdl dir = withDavHandle hdl $ \h -> runExport h $ \_dav -> do
let d = fromRawFilePath $ fromExportDirectory dir
debugDav $ "delContent " ++ d
- safely (inLocation d delContentM)
- >>= maybe (return False) (const $ return True)
+ inLocation d delContentM
-renameExportDav :: DavHandleVar -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe Bool)
+renameExportDav :: DavHandleVar -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe ())
renameExportDav hdl _k src dest = case (exportLocation src, exportLocation dest) of
- (Right srcl, Right destl) -> withDavHandle hdl $ \case
- Just h
- -- box.com's DAV endpoint has buggy handling of renames,
- -- so avoid renaming when using it.
- | boxComUrl `isPrefixOf` baseURL h -> return Nothing
- | otherwise -> do
- v <- runExport (Just h) $ \dav -> do
- maybe noop (void . mkColRecursive) (locationParent destl)
- moveDAV (baseURL dav) srcl destl
- return True
- return (Just v)
- Nothing -> return (Just False)
- _ -> return (Just False)
-
-runExport :: Maybe DavHandle -> (DavHandle -> DAVT IO Bool) -> Annex Bool
-runExport Nothing _ = return False
-runExport (Just h) a = fromMaybe False <$> liftIO (goDAV h $ safely (a h))
+ (Right srcl, Right destl) -> withDavHandle hdl $ \h ->
+ -- box.com's DAV endpoint has buggy handling of renames,
+ -- so avoid renaming when using it.
+ if boxComUrl `isPrefixOf` baseURL h
+ then return Nothing
+ else runExport h $ \dav -> do
+ maybe noop (void . mkColRecursive) (locationParent destl)
+ moveDAV (baseURL dav) srcl destl
+ return (Just ())
+ (Left err, _) -> giveup err
+ (_, Left err) -> giveup err
+
+runExport :: DavHandle -> (DavHandle -> DAVT IO a) -> Annex a
+runExport h a = liftIO (goDAV h (a h))
configUrl :: ParsedRemoteConfig -> Maybe URLString
configUrl c = fixup <$> getRemoteConfigValue urlField c
@@ -418,7 +394,7 @@ choke f = do
data DavHandle = DavHandle DAVContext DavUser DavPass URLString
-type DavHandleVar = TVar (Either (Annex (Maybe DavHandle)) (Maybe DavHandle))
+type DavHandleVar = TVar (Either (Annex (Either String DavHandle)) (Either String DavHandle))
{- Prepares a DavHandle for later use. Does not connect to the server or do
- anything else expensive. -}
@@ -429,16 +405,16 @@ mkDavHandleVar c gc u = liftIO $ newTVarIO $ Left $ do
(Just (user, pass), Just baseurl) -> do
ctx <- mkDAVContext baseurl
let h = DavHandle ctx (toDavUser user) (toDavPass pass) baseurl
- return (Just h)
- _ -> return Nothing
+ return (Right h)
+ _ -> return $ Left "webdav credentials not available"
-withDavHandle :: DavHandleVar -> (Maybe DavHandle -> Annex a) -> Annex a
+withDavHandle :: DavHandleVar -> (DavHandle -> Annex a) -> Annex a
withDavHandle hv a = liftIO (readTVarIO hv) >>= \case
- Right hdl -> a hdl
+ Right hdl -> either giveup a hdl
Left mkhdl -> do
hdl <- mkhdl
liftIO $ atomically $ writeTVar hv (Right hdl)
- a hdl
+ either giveup a hdl
goDAV :: DavHandle -> DAVT IO a -> IO a
goDAV (DavHandle ctx user pass _) a = choke $ run $ prettifyExceptions $ do
@@ -472,7 +448,7 @@ prepDAV user pass = do
-- Legacy chunking code, to be removed eventually.
--
-storeLegacyChunked :: ChunkSize -> Key -> DavHandle -> L.ByteString -> IO Bool
+storeLegacyChunked :: ChunkSize -> Key -> DavHandle -> L.ByteString -> IO ()
storeLegacyChunked chunksize k dav b =
Legacy.storeChunks k tmp dest storer recorder finalizer
where
diff --git a/Test.hs b/Test.hs
index 7f79faf..0873bb1 100644
--- a/Test.hs
+++ b/Test.hs
@@ -1,6 +1,6 @@
{- git-annex test suite
-
- - Copyright 2010-2019 Joey Hess <id@joeyh.name>
+ - Copyright 2010-2020 Joey Hess <id@joeyh.name>
-
- Licensed under the GNU AGPL version 3 or higher.
-}
@@ -19,6 +19,7 @@ import Test.Tasty.Runners
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck
import Test.Tasty.Ingredients.Rerun
+import Test.Tasty.Options
import Options.Applicative (switch, long, help, internal)
import qualified Data.Map as M
@@ -96,7 +97,7 @@ import qualified Types.Remote
optParser :: Parser TestOptions
optParser = TestOptions
- <$> suiteOptionParser ingredients (tests False True mempty)
+ <$> snd tastyParser
<*> switch
( long "keep-failures"
<> help "preserve repositories on test failure"
@@ -107,6 +108,15 @@ optParser = TestOptions
)
<*> cmdParams "non-options are for internal use only"
+tastyParser :: ([String], Parser Test.Tasty.Options.OptionSet)
+#if MIN_VERSION_tasty(1,3,0)
+tastyParser = go
+#else
+tastyParser = ([], go)
+#endif
+ where
+ go = suiteOptionParser ingredients (tests False True mempty)
+
runner :: TestOptions -> IO ()
runner opts
| fakeSsh opts = runFakeSsh (internalData opts)
@@ -118,6 +128,10 @@ runner opts
-- suite.
subenv = "GIT_ANNEX_TEST_SUBPROCESS"
runsubprocesstests Nothing = do
+ let warnings = fst tastyParser
+ unless (null warnings) $ do
+ hPutStrLn stderr "warnings from tasty:"
+ mapM_ (hPutStrLn stderr) warnings
pp <- Annex.Path.programPath
Utility.Env.Set.setEnv subenv "1" True
ps <- getArgs
diff --git a/Test/Framework.hs b/Test/Framework.hs
index b949839..0b523e5 100644
--- a/Test/Framework.hs
+++ b/Test/Framework.hs
@@ -582,9 +582,10 @@ backend_ :: String -> Types.Backend
backend_ = Backend.lookupBackendVariety . Types.Key.parseKeyVariety . encodeBS
getKey :: Types.Backend -> FilePath -> IO Types.Key
-getKey b f = fromJust <$> annexeval go
+getKey b f = case Types.Backend.getKey b of
+ Just a -> annexeval $ a ks Utility.Metered.nullMeterUpdate
+ Nothing -> error "internal"
where
- go = Types.Backend.getKey b ks Utility.Metered.nullMeterUpdate
ks = Types.KeySource.KeySource
{ Types.KeySource.keyFilename = toRawFilePath f
, Types.KeySource.contentLocation = toRawFilePath f
diff --git a/Types/Backend.hs b/Types/Backend.hs
index d1dfe61..a5d482e 100644
--- a/Types/Backend.hs
+++ b/Types/Backend.hs
@@ -17,7 +17,7 @@ import Utility.FileSystemEncoding
data BackendA a = Backend
{ backendVariety :: KeyVariety
- , getKey :: KeySource -> MeterUpdate -> a (Maybe Key)
+ , getKey :: Maybe (KeySource -> MeterUpdate -> a Key)
-- Verifies the content of a key.
, verifyKeyContent :: Maybe (Key -> FilePath -> a Bool)
-- Checks if a key can be upgraded to a better form.
diff --git a/Types/Remote.hs b/Types/Remote.hs
index 7e09f0b..a5efff1 100644
--- a/Types/Remote.hs
+++ b/Types/Remote.hs
@@ -84,22 +84,27 @@ data RemoteA a = Remote
-- Transfers a key's contents from disk to the remote.
-- The key should not appear to be present on the remote until
-- all of its contents have been transferred.
- , storeKey :: Key -> AssociatedFile -> MeterUpdate -> a Bool
+ -- Throws exception on failure.
+ , storeKey :: Key -> AssociatedFile -> MeterUpdate -> a ()
-- Retrieves a key's contents to a file.
-- (The MeterUpdate does not need to be used if it writes
-- sequentially to the file.)
- , retrieveKeyFile :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> a (Bool, Verification)
+ -- Throws exception on failure.
+ , retrieveKeyFile :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> a Verification
-- Retrieves a key's contents to a tmp file, if it can be done cheaply.
-- It's ok to create a symlink or hardlink.
- , retrieveKeyFileCheap :: Key -> AssociatedFile -> FilePath -> a Bool
+ -- Throws exception on failure.
+ , retrieveKeyFileCheap :: Maybe (Key -> AssociatedFile -> FilePath -> a ())
-- Security policy for reteiving keys from this remote.
, retrievalSecurityPolicy :: RetrievalSecurityPolicy
- -- Removes a key's contents (succeeds if the contents are not present)
- , removeKey :: Key -> a Bool
+ -- Removes a key's contents (succeeds even the contents are not present)
+ -- Can throw exception if unable to access remote, or if remote
+ -- refuses to remove the content.
+ , removeKey :: Key -> a ()
-- Uses locking to prevent removal of a key's contents,
-- thus producing a VerifiedCopy, which is passed to the callback.
-- If unable to lock, does not run the callback, and throws an
- -- error.
+ -- exception.
-- This is optional; remotes do not have to support locking.
, lockContent :: forall r. Maybe (Key -> (VerifiedCopy -> a r) -> a r)
-- Checks if a key is present in the remote.
@@ -144,7 +149,10 @@ data RemoteA a = Remote
, mkUnavailable :: a (Maybe (RemoteA a))
-- Information about the remote, for git annex info to display.
, getInfo :: a [(String, String)]
- -- Some remotes can download from an url (or uri).
+ -- Some remotes can download from an url (or uri). This asks the
+ -- remote if it can handle a particular url. The actual download
+ -- will be done using retrieveKeyFile, and the remote can look up
+ -- up the url to download for a key using Logs.Web.getUrls.
, claimUrl :: Maybe (URLString -> a Bool)
-- Checks that the url is accessible, and gets information about
-- its contents, without downloading the full content.
@@ -185,7 +193,7 @@ data Verification
-- ^ Content likely to have been altered during transfer,
-- verify even if verification is normally disabled
-unVerified :: Monad m => m Bool -> m (Bool, Verification)
+unVerified :: Monad m => m a -> m (a, Verification)
unVerified a = do
ok <- a
return (ok, UnVerified)
@@ -226,28 +234,39 @@ data ExportActions a = ExportActions
-- Exports content to an ExportLocation.
-- The exported file should not appear to be present on the remote
-- until all of its contents have been transferred.
- { storeExport :: FilePath -> Key -> ExportLocation -> MeterUpdate -> a Bool
+ -- Throws exception on failure.
+ { storeExport :: FilePath -> Key -> ExportLocation -> MeterUpdate -> a ()
-- Retrieves exported content to a file.
-- (The MeterUpdate does not need to be used if it writes
-- sequentially to the file.)
- , retrieveExport :: Key -> ExportLocation -> FilePath -> MeterUpdate -> a Bool
+ -- Throws exception on failure.
+ , retrieveExport :: Key -> ExportLocation -> FilePath -> MeterUpdate -> a ()
-- Removes an exported file (succeeds if the contents are not present)
- , removeExport :: Key -> ExportLocation -> a Bool
+ -- Can throw exception if unable to access remote, or if remote
+ -- refuses to remove the content.
+ , removeExport :: Key -> ExportLocation -> a ()
-- Removes an exported directory. Typically the directory will be
-- empty, but it could possibly contain files or other directories,
-- and it's ok to delete those (but not required to).
-- If the remote does not use directories, or automatically cleans
-- up empty directories, this can be Nothing.
+ --
-- Should not fail if the directory was already removed.
- , removeExportDirectory :: Maybe (ExportDirectory -> a Bool)
+ --
+ -- Throws exception if unable to contact the remote, or perhaps if
+ -- the remote refuses to let the directory be removed.
+ , removeExportDirectory :: Maybe (ExportDirectory -> a ())
-- Checks if anything is exported to the remote at the specified
-- ExportLocation.
-- Throws an exception if the remote cannot be accessed.
, checkPresentExport :: Key -> ExportLocation -> a Bool
-- Renames an already exported file.
- -- This may fail with False, if the file doesn't exist.
+ --
-- If the remote does not support renames, it can return Nothing.
- , renameExport :: Key -> ExportLocation -> ExportLocation -> a (Maybe Bool)
+ --
+ -- Throws an exception if the remote cannot be accessed, or
+ -- the file doesn't exist or cannot be renamed.
+ , renameExport :: Key -> ExportLocation -> ExportLocation -> a (Maybe ())
}
data ImportActions a = ImportActions
@@ -263,15 +282,17 @@ data ImportActions a = ImportActions
-- This has to be used rather than retrieveExport
-- when a special remote supports imports, since files on such a
-- special remote can be changed at any time.
+ --
+ -- Throws exception on failure.
, retrieveExportWithContentIdentifier
:: ExportLocation
-> ContentIdentifier
+ -- file to write content to
-> FilePath
- -- ^ file to write content to
- -> a (Maybe Key)
- -- ^ callback that generates a key from the downloaded content
+ -- callback that generates a key from the downloaded content
+ -> a Key
-> MeterUpdate
- -> a (Maybe Key)
+ -> a Key
-- Exports content to an ExportLocation, and returns the
-- ContentIdentifier corresponding to the content it stored.
--
@@ -288,14 +309,16 @@ data ImportActions a = ImportActions
-- needs to make sure that the ContentIdentifier it returns
-- corresponds to what it wrote, not to what some other writer
-- wrote.
+ --
+ -- Throws exception on failure.
, storeExportWithContentIdentifier
:: FilePath
-> Key
-> ExportLocation
+ -- old content that it's safe to overwrite
-> [ContentIdentifier]
- -- ^ old content that it's safe to overwrite
-> MeterUpdate
- -> a (Either String ContentIdentifier)
+ -> a ContentIdentifier
-- This is used rather than removeExport when a special remote
-- supports imports.
--
@@ -304,17 +327,22 @@ data ImportActions a = ImportActions
-- can recover an overwritten file.
--
-- It needs to handle races similar to storeExportWithContentIdentifier.
+ --
+ -- Throws an exception when unable to remove.
, removeExportWithContentIdentifier
:: Key
-> ExportLocation
-> [ContentIdentifier]
- -> a Bool
+ -> a ()
-- Removes a directory from the export, but only when it's empty.
-- Used instead of removeExportDirectory when a special remote
-- supports imports.
--
-- If the directory is not empty, it should succeed.
- , removeExportDirectoryWhenEmpty :: Maybe (ExportDirectory -> a Bool)
+ --
+ -- Throws exception if unable to contact the remote, or perhaps if
+ -- the remote refuses to let the directory be removed.
+ , removeExportDirectoryWhenEmpty :: Maybe (ExportDirectory -> a ())
-- Checks if the specified ContentIdentifier is exported to the
-- remote at the specified ExportLocation.
-- Throws an exception if the remote cannot be accessed.
diff --git a/Types/StoreRetrieve.hs b/Types/StoreRetrieve.hs
index e8faae1..ac28da7 100644
--- a/Types/StoreRetrieve.hs
+++ b/Types/StoreRetrieve.hs
@@ -12,10 +12,6 @@ import Utility.Metered
import qualified Data.ByteString.Lazy as L
--- Prepares for and then runs an action that will act on a Key's
--- content, passing it a helper when the preparation is successful.
-type Preparer helper = Key -> (Maybe helper -> Annex Bool) -> Annex Bool
-
-- A source of a Key's content.
data ContentSource
= FileContent FilePath
@@ -27,16 +23,17 @@ isByteContent (FileContent _) = False
-- Action that stores a Key's content on a remote.
-- Can throw exceptions.
-type Storer = Key -> ContentSource -> MeterUpdate -> Annex Bool
+type Storer = Key -> ContentSource -> MeterUpdate -> Annex ()
-- Action that retrieves a Key's content from a remote, passing it to a
-- callback, which will fully consume the content before returning.
-- Throws exception if key is not present, or remote is not accessible.
-type Retriever = Key -> MeterUpdate -> (ContentSource -> Annex Bool) -> Annex Bool
+type Retriever = Key -> MeterUpdate -> (ContentSource -> Annex ()) -> Annex ()
-- Action that removes a Key's content from a remote.
--- Succeeds if key is already not present; never throws exceptions.
-type Remover = Key -> Annex Bool
+-- Succeeds if key is already not present.
+-- Throws an exception if the remote is not accessible.
+type Remover = Key -> Annex ()
-- Checks if a Key's content is present on a remote.
-- Throws an exception if the remote is not accessible.
diff --git a/Types/UrlContents.hs b/Types/UrlContents.hs
index c68efd0..c2d2ca8 100644
--- a/Types/UrlContents.hs
+++ b/Types/UrlContents.hs
@@ -7,41 +7,14 @@
module Types.UrlContents (
UrlContents(..),
- SafeFilePath,
- mkSafeFilePath,
- fromSafeFilePath
) where
import Utility.Url
-import Utility.Path
-
-import System.FilePath
data UrlContents
-- An URL contains a file, whose size may be known.
-- There might be a nicer filename to use.
- = UrlContents (Maybe Integer) (Maybe SafeFilePath)
+ = UrlContents (Maybe Integer) (Maybe FilePath)
-- Sometimes an URL points to multiple files, each accessible
-- by their own URL.
- | UrlMulti [(URLString, Maybe Integer, SafeFilePath)]
-
--- This is a FilePath, from an untrusted source,
--- sanitized so it doesn't contain any directory traversal tricks
--- and is always relative. It can still contain subdirectories.
--- Any unusual characters are also filtered out.
-newtype SafeFilePath = SafeFilePath FilePath
- deriving (Show)
-
-mkSafeFilePath :: FilePath -> SafeFilePath
-mkSafeFilePath p = SafeFilePath $ if null p' then "file" else p'
- where
- p' = joinPath $ filter safe $ map sanitizeFilePath $ splitDirectories p
- safe s
- | isDrive s = False
- | s == ".." = False
- | s == ".git" = False
- | null s = False
- | otherwise = True
-
-fromSafeFilePath :: SafeFilePath -> FilePath
-fromSafeFilePath (SafeFilePath p) = p
+ | UrlMulti [(URLString, Maybe Integer, FilePath)]
diff --git a/Upgrade/V5.hs b/Upgrade/V5.hs
index 0e0368d..7ce6600 100644
--- a/Upgrade/V5.hs
+++ b/Upgrade/V5.hs
@@ -36,7 +36,7 @@ import Annex.AdjustedBranch
import qualified Data.ByteString as S
upgrade :: Bool -> Annex Bool
-upgrade automatic = flip catchNonAsync (const $ return False) $ do
+upgrade automatic = flip catchNonAsync onexception $ do
unless automatic $
showAction "v5 to v6"
ifM isDirect
@@ -55,6 +55,10 @@ upgrade automatic = flip catchNonAsync (const $ return False) $ do
unlessM isDirect $
createInodeSentinalFile True
return True
+ where
+ onexception e = do
+ warning $ "caught exception: " ++ show e
+ return False
-- git before 2.22 would OOM running git status on a large file.
--
diff --git a/Utility/CoProcess.hs b/Utility/CoProcess.hs
index 59c0064..e091d43 100644
--- a/Utility/CoProcess.hs
+++ b/Utility/CoProcess.hs
@@ -9,7 +9,7 @@
{-# LANGUAGE CPP #-}
module Utility.CoProcess (
- CoProcessHandle(..),
+ CoProcessHandle,
CoProcessState(..),
start,
stop,
diff --git a/Utility/Exception.hs b/Utility/Exception.hs
index bcadb78..273f844 100644
--- a/Utility/Exception.hs
+++ b/Utility/Exception.hs
@@ -39,7 +39,7 @@ import Utility.Data
{- Like error, this throws an exception. Unlike error, if this exception
- is not caught, it won't generate a backtrace. So use this for situations
- - where there's a problem that the user is excpected to see in some
+ - where there's a problem that the user is expeected to see in some
- circumstances. -}
giveup :: [Char] -> a
giveup = errorWithoutStackTrace
diff --git a/Utility/Format.hs b/Utility/Format.hs
index a2470fa..7c3de50 100644
--- a/Utility/Format.hs
+++ b/Utility/Format.hs
@@ -1,6 +1,6 @@
{- Formatted string handling.
-
- - Copyright 2010, 2011 Joey Hess <id@joeyh.name>
+ - Copyright 2010-2020 Joey Hess <id@joeyh.name>
-
- License: BSD-2-clause
-}
@@ -9,6 +9,7 @@ module Utility.Format (
Format,
gen,
format,
+ formatContainsVar,
decode_c,
encode_c,
prop_encode_c_decode_c_roundtrip
@@ -29,9 +30,14 @@ type FormatString = String
{- A format consists of a list of fragments. -}
type Format = [Frag]
-{- A fragment is either a constant string,
- - or a variable, with a justification. -}
-data Frag = Const String | Var String Justify
+{- A fragment is either a constant string, or a variable. -}
+data Frag
+ = Const String
+ | Var
+ { varName :: String
+ , varJustify :: Justify
+ , varEscaped :: Bool
+ }
deriving (Show)
data Justify = LeftJustified Int | RightJustified Int | UnJustified
@@ -45,10 +51,8 @@ format :: Format -> Variables -> String
format f vars = concatMap expand f
where
expand (Const s) = s
- expand (Var name j)
- | "escaped_" `isPrefixOf` name =
- justify j $ encode_c_strict $
- getvar $ drop (length "escaped_") name
+ expand (Var name j esc)
+ | esc = justify j $ encode_c_strict $ getvar name
| otherwise = justify j $ getvar name
getvar name = fromMaybe "" $ M.lookup name vars
justify UnJustified s = s
@@ -61,6 +65,8 @@ format f vars = concatMap expand f
- format string, such as "${foo} ${bar;10} ${baz;-10}\n"
-
- (This is the same type of format string used by dpkg-query.)
+ -
+ - Also, "${escaped_foo}" will apply encode_c to the value of variable foo.
-}
gen :: FormatString -> Format
gen = filter (not . empty) . fuse [] . scan [] . decode_c
@@ -94,12 +100,24 @@ gen = filter (not . empty) . fuse [] . scan [] . decode_c
| i < 0 = LeftJustified (-1 * i)
| otherwise = RightJustified i
novar v = "${" ++ reverse v
- foundvar f v p = scan (Var (reverse v) p : f)
+ foundvar f varname_r p =
+ let varname = reverse varname_r
+ var = if "escaped_" `isPrefixOf` varname
+ then Var (drop (length "escaped_") varname) p True
+ else Var varname p False
+ in scan (var : f)
empty :: Frag -> Bool
empty (Const "") = True
empty _ = False
+{- Check if a Format contains a variable with a specified name. -}
+formatContainsVar :: String -> Format -> Bool
+formatContainsVar v = any go
+ where
+ go (Var v' _ _) | v' == v = True
+ go _ = False
+
{- Decodes a C-style encoding, where \n is a newline (etc),
- \NNN is an octal encoded character, and \xNN is a hex encoded character.
-}
diff --git a/Utility/Path.hs b/Utility/Path.hs
index a8ab918..b66e127 100644
--- a/Utility/Path.hs
+++ b/Utility/Path.hs
@@ -1,6 +1,6 @@
{- path manipulation
-
- - Copyright 2010-2014 Joey Hess <id@joeyh.name>
+ - Copyright 2010-2020 Joey Hess <id@joeyh.name>
-
- License: BSD-2-clause
-}
@@ -24,7 +24,6 @@ module Utility.Path (
inPath,
searchPath,
dotfile,
- sanitizeFilePath,
splitShortExtensions,
prop_upFrom_basics,
@@ -35,7 +34,6 @@ module Utility.Path (
import System.FilePath
import Data.List
import Data.Maybe
-import Data.Char
import Control.Applicative
import Prelude
@@ -90,7 +88,7 @@ parentDir :: FilePath -> FilePath
parentDir = takeDirectory . dropTrailingPathSeparator
{- Just the parent directory of a path, or Nothing if the path has no
-- parent (ie for "/" or ".") -}
+- parent (ie for "/" or "." or "foo") -}
upFrom :: FilePath -> Maybe FilePath
upFrom dir
| length dirs < 2 = Nothing
@@ -276,22 +274,6 @@ dotfile file
where
f = takeFileName file
-{- Given a string that we'd like to use as the basis for FilePath, but that
- - was provided by a third party and is not to be trusted, returns the closest
- - sane FilePath.
- -
- - All spaces and punctuation and other wacky stuff are replaced
- - with '_', except for '.'
- - "../" will thus turn into ".._", which is safe.
- -}
-sanitizeFilePath :: String -> FilePath
-sanitizeFilePath = map sanitize
- where
- sanitize c
- | c == '.' = c
- | isSpace c || isPunctuation c || isSymbol c || isControl c || c == '/' = '_'
- | otherwise = c
-
{- Similar to splitExtensions, but knows that some things in FilePaths
- after a dot are too long to be extensions. -}
splitShortExtensions :: FilePath -> (FilePath, [String])
diff --git a/Utility/Url.hs b/Utility/Url.hs
index 196a2b1..763474f 100644
--- a/Utility/Url.hs
+++ b/Utility/Url.hs
@@ -244,12 +244,12 @@ getUrlInfo url uo = case parseURIRelaxed url of
<=< lookup hContentDisposition . responseHeaders
existsconduit r req =
- let go = catchcrossprotoredir r (existsconduit' req uo)
- in catchJust matchconnectionrestricted go retconnectionrestricted
+ let a = catchcrossprotoredir r (existsconduit' req uo)
+ in catchJust matchconnectionrestricted a retconnectionrestricted
matchconnectionrestricted he@(HttpExceptionRequest _ (InternalException ie)) =
case fromException ie of
- Just (ConnectionRestricted why) -> Just he
+ Just (ConnectionRestricted _why) -> Just he
_ -> Nothing
matchconnectionrestricted _ = Nothing
diff --git a/doc/git-annex-add.mdwn b/doc/git-annex-add.mdwn
index ce9cd8c..ea9d3c1 100644
--- a/doc/git-annex-add.mdwn
+++ b/doc/git-annex-add.mdwn
@@ -29,6 +29,13 @@ supporting symlinks.)
This command can also be used to add symbolic links, both symlinks to
annexed content, and other symlinks.
+# EXAMPLES
+
+ # git annex add foo bar
+ add foo ok
+ add bar ok
+ # git commit -m added
+
# OPTIONS
* `--force`
diff --git a/doc/git-annex-addurl.mdwn b/doc/git-annex-addurl.mdwn
index 224d230..68ff68b 100644
--- a/doc/git-annex-addurl.mdwn
+++ b/doc/git-annex-addurl.mdwn
@@ -57,6 +57,16 @@ be used to get better filenames.
If the file already exists, addurl will record that it can be downloaded
from the specified url(s).
+* `--preserve-filename`
+
+ When the web server (or torrent, etc) provides a filename, use it as-is,
+ avoiding sanitizing unusual characters, or truncating it to length, or any
+ other modifications.
+
+ git-annex will still check the filename for safety, and if the filename
+ has a security problem such as path traversal or an escape sequence,
+ it will refuse to add it.
+
* `--pathdepth=N`
Rather than basing the filename on the whole url, this causes a path to
diff --git a/doc/git-annex-drop.mdwn b/doc/git-annex-drop.mdwn
index 6b8415b..3b1f770 100644
--- a/doc/git-annex-drop.mdwn
+++ b/doc/git-annex-drop.mdwn
@@ -21,6 +21,19 @@ even if enough copies exist elsewhere. See [[git-annex-required]](1).
With no parameters, tries to drop all annexed files in the current directory.
Paths of files or directories to drop can be specified.
+# EXAMPLES
+
+ # git annex drop *.jpeg
+ drop photo1.jpg (checking origin...) ok
+ drop photo2.jpg (unsafe)
+ Could only verify the existence of 0 out of 1 necessary copies
+
+ Rather than dropping this file, try using: git annex move
+
+ (Use --force to override this check, or adjust numcopies.)
+ failed
+ drop photo3.jpg (checking origin...) ok
+
# OPTIONS
* `--from=remote`
diff --git a/doc/git-annex-get.mdwn b/doc/git-annex-get.mdwn
index abe5757..28a73aa 100644
--- a/doc/git-annex-get.mdwn
+++ b/doc/git-annex-get.mdwn
@@ -16,6 +16,16 @@ With no parameters, gets all annexed files in the current directory whose
content was not already present. Paths of files or directories to get can
be specified.
+# EXAMPLES
+
+ # evince foo.pdf
+ error: Unable to open document foo.pdf: No such file or directory
+ # ls foo.pdf
+ foo.pdf@
+ # git annex get foo.pdf
+ get foo.pdf (from origin..) ok
+ # evince foo.pdf
+
# OPTIONS
* `--auto`
diff --git a/doc/git-annex-init.mdwn b/doc/git-annex-init.mdwn
index 966b039..c11edd5 100644
--- a/doc/git-annex-init.mdwn
+++ b/doc/git-annex-init.mdwn
@@ -29,6 +29,15 @@ in a repository. This is useful for repositories that have a policy
reason not to use git-annex. The content of the file will be displayed
to the user who tries to run git-annex init.
+# EXAMPLES
+
+ # git annex add foo
+ git-annex: First run: git-annex init
+ # git annex init
+ init ok
+ # git annex add foo
+ add foo ok
+
# OPTIONS
* `--version=N`
diff --git a/doc/git-annex-lock.mdwn b/doc/git-annex-lock.mdwn
index c13654d..202faf6 100644
--- a/doc/git-annex-lock.mdwn
+++ b/doc/git-annex-lock.mdwn
@@ -1,6 +1,6 @@
# NAME
-git-annex lock - undo unlock command
+git-annex lock - lock files to prevent modification
# SYNOPSIS
@@ -8,8 +8,13 @@ git annex lock `[path ...]`
# DESCRIPTION
-Use this to undo an unlock command if you don't want to modify
-the files any longer, or have made modifications you want to discard.
+Lock the specified annexed files, to prevent them from being modified.
+When no files are specified, all annexed files in the current directory are
+locked.
+
+Locking a file changes how it is stored in the git repository (from a
+pointer file to a symlink), so this command will make a change that you
+can commit.
# OPTIONS
diff --git a/doc/git-annex-p2p.mdwn b/doc/git-annex-p2p.mdwn
index 060ce5d..c485f93 100644
--- a/doc/git-annex-p2p.mdwn
+++ b/doc/git-annex-p2p.mdwn
@@ -44,6 +44,9 @@ services.
Note that anyone who knows these addresses can access your
repository over the P2P networks.
+
+ This can be run repeatedly, in order to give different addresses
+ out to different people.
* `--link`
diff --git a/doc/git-annex-unlock.mdwn b/doc/git-annex-unlock.mdwn
index 3dea02b..b410c88 100644
--- a/doc/git-annex-unlock.mdwn
+++ b/doc/git-annex-unlock.mdwn
@@ -9,8 +9,8 @@ git annex unlock `[path ...]`
# DESCRIPTION
Normally, the content of annexed files is protected from being changed.
-Unlocking an annexed file allows it to be modified. This replaces the
-symlink for each specified file with the file's content.
+Unlocking an annexed file allows it to be modified. When no files are
+specified, all annexed files in the current directory are unlocked.
Unlocking a file changes how it is stored in the git repository (from a
symlink to a pointer file), so this command will make a change that you
@@ -32,6 +32,16 @@ system.) While this can save considerable disk space, any modification made
to a file will cause the old version of the file to be lost from the local
repository. So, enable annex.thin with care.
+# EXAMPLES
+
+ # git annex unlock disk-image
+ # git commit -m "unlocked to allow VM to make changes as it runs"
+
+ # git annex unlock photo.jpg
+ # gimp photo.jpg
+ # git annex lock photo.jpg
+ # git commit -m "redeye removal"
+
# OPTIONS
* file matching options
diff --git a/doc/git-annex-whereis.mdwn b/doc/git-annex-whereis.mdwn
index dd836fe..b6cdaf2 100644
--- a/doc/git-annex-whereis.mdwn
+++ b/doc/git-annex-whereis.mdwn
@@ -71,6 +71,32 @@ received from remotes.
Messages that would normally be output to standard error are included in
the json instead.
+* `--format=value`
+
+ Use custom output formatting.
+
+ The value is a format string, in which '${var}' is expanded to the
+ value of a variable. To right-justify a variable with whitespace,
+ use '${var;width}' ; to left-justify a variable, use '${var;-width}';
+ to escape unusual characters in a variable, use '${escaped_var}'
+
+ These variables are available for use in formats: file, key, uuid,
+ url, backend, bytesize, humansize, keyname, hashdirlower, hashdirmixed,
+ mtime (for the mtime field of a WORM key).
+
+ Also, '\\n' is a newline, '\\000' is a NULL, etc.
+
+ When the format contains the uuid variable, it will be expanded in turn
+ for each repository that contains the file content. For example,
+ with --format="${file} ${uuid}\\n", output will look like:
+
+ foo 00000000-0000-0000-0000-000000000001
+ foo a7f7ddd0-9a08-11ea-ab66-8358e4209d30
+ bar a7f7ddd0-9a08-11ea-ab66-8358e4209d30
+
+ The same applies when the url variable is used and a file has multiple
+ recorded urls.
+
# SEE ALSO
[[git-annex]](1)
diff --git a/doc/git-annex.mdwn b/doc/git-annex.mdwn
index 7925d2b..3703bcd 100644
--- a/doc/git-annex.mdwn
+++ b/doc/git-annex.mdwn
@@ -54,12 +54,6 @@ content from the key-value store.
# COMMONLY USED COMMANDS
-Like many git commands, git-annex can be passed a path that
-is either a file or a directory. In the latter case it acts on all relevant
-files in the directory. When no path is specified, most git-annex commands
-default to acting on all relevant files in the current directory (and
-subdirectories).
-
* `help`
Display built-in help.
@@ -68,8 +62,7 @@ subdirectories).
* `add [path ...]`
- Adds files in the path to the annex. If no path is specified, adds
- files from the current directory and below.
+ Adds files to the annex.
See [[git-annex-add]](1) for details.
@@ -1821,9 +1814,9 @@ These environment variables are used by git-annex when set:
variable can confuse git-annex's book-keeping, sometimes in ways that
`git annex fsck` is unable to repair.
-Some special remotes use additional environment variables
-for authentication etc. For example, `AWS_ACCESS_KEY_ID`
-and `GIT_ANNEX_P2P_AUTHTOKEN`. See special remote documentation.
+* Some special remotes use additional environment variables
+ for authentication etc. For example, `AWS_ACCESS_KEY_ID`
+ and `GIT_ANNEX_P2P_AUTHTOKEN`. See special remote documentation.
# FILES
diff --git a/git-annex.cabal b/git-annex.cabal
index 2ee9598..d702fd8 100644
--- a/git-annex.cabal
+++ b/git-annex.cabal
@@ -1,6 +1,6 @@
Name: git-annex
-Version: 8.20200501
-Cabal-Version: >= 1.8
+Version: 8.20200522
+Cabal-Version: >= 1.10
License: AGPL-3
Maintainer: Joey Hess <id@joeyh.name>
Author: Joey Hess
@@ -372,10 +372,11 @@ Executable git-annex
tasty-rerun
CC-Options: -Wall
GHC-Options: -Wall -fno-warn-tabs -Wincomplete-uni-patterns
- Extensions: PackageImports, LambdaCase
+ Default-Language: Haskell98
+ Default-Extensions: PackageImports, LambdaCase
+ Other-Extensions: TemplateHaskell
-- Some things don't work with the non-threaded RTS.
GHC-Options: -threaded
- Other-Extensions: TemplateHaskell
-- Fully optimize for production.
if flag(Production)
@@ -660,6 +661,7 @@ Executable git-annex
Annex.TaggedPush
Annex.Tmp
Annex.Transfer
+ Annex.UntrustedFilePath
Annex.UpdateInstead
Annex.UUID
Annex.Url