
Darcs patch reader: Join adjacent remove-add sequences like in the Git module

fr33domlover 2018-07-09 19:12:11 +00:00
parent 16c71b666f
commit 13bd369de3
2 changed files with 78 additions and 25 deletions

Changed file 1 of 2: Data.List.Local

@@ -17,12 +17,17 @@ module Data.List.Local
( -- groupByFst
groupJusts
, groupEithers
, groupPairs
, groupMap
, groupMapBy
, groupMapBy1
)
where
import Prelude
import Data.List.NonEmpty (NonEmpty (..), (<|))
import Data.Function (on)
import Data.List.NonEmpty (NonEmpty (..), (<|), toList)
-- | Takes a list of pairs and groups them by consecutive ranges with equal
-- first element. Returns a list of pairs, where each pair corresponds to one
@@ -54,3 +59,41 @@ groupEithers = foldr go ([], [], [])
go (Left x) ([] , (xs, ys):ps, as) = ([], (x <| xs, ys) : ps , as)
go (Left x) (b:bs, ps , as) = ([], (x :| [], b :| bs) : ps, as)
go (Right y) (bs, ps, as) = (y : bs, ps, as)
groupPairs
:: Foldable f => f ([a], [b]) -> ([b], [(NonEmpty a, NonEmpty b)], [a])
groupPairs = groupEithers . foldr go []
where
go (xs, ys) es = map Left xs ++ map Right ys ++ es
-- | @groupMap f g l@ groups elements like 'group', except it compares them by
-- applying @f@ to elements and comparing these values using the 'Eq' instance.
-- It then maps the elements in each such equality group using @g@.
--
-- >>> groupMap fst snd [(1, 5), (1, 6), (2, 7), (2, 8), (2, 9)]
-- [(1, [5, 6]), (2, [7, 8, 9])]
groupMap :: Eq b => (a -> b) -> (a -> c) -> [a] -> [(b, NonEmpty c)]
groupMap f = groupMapBy ((==) `on` f) f
-- | Like 'groupMap', except it uses a comparison predicate instead of an 'Eq'
-- instance.
groupMapBy
:: (a -> a -> Bool) -> (a -> b) -> (a -> c) -> [a] -> [(b, NonEmpty c)]
groupMapBy _ _ _ [] = []
groupMapBy eq f g (x:xs) = toList $ groupMapBy1 eq f g $ x :| xs
-- | Like 'groupMapBy1', but takes and returns a 'NonEmpty'.
groupMapBy1
:: (a -> a -> Bool)
-> (a -> b)
-> (a -> c)
-> NonEmpty a
-> NonEmpty (b, NonEmpty c)
groupMapBy1 eq f g = go
where
go (x :| xs) =
let (ys, zs) = span (eq x) xs
rest = case zs of
[] -> []
z:l -> toList $ go $ z :| l
in (f x, g x :| map g ys) :| rest
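
To make the behaviour of the new helpers concrete, here is an informal sketch in the same doctest style as the groupMap example above. The inputs are made up for illustration, the results are written with plain lists in place of NonEmpty values, and only the signatures shown in this file are assumed.

>>> groupMapBy1 ((==) `on` fst) fst snd ((1, 5) :| [(1, 6), (2, 7)])
[(1, [5, 6]), (2, [7])]

>>> groupPairs [(["r1"], ["a1"]), ([], ["a2"]), (["r2", "r3"], ["a3"])]
([], [(["r1"], ["a1", "a2"]), (["r2", "r3"], ["a3"])], [])

groupPairs flattens each pair into Lefts and Rights and regroups them, so a run of first components followed by a run of second components becomes one pair, while leading second components and trailing first components end up in the outer positions of the result.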

Changed file 2 of 2: the Darcs patch reader

@@ -25,12 +25,14 @@ where
import Prelude hiding (lookup)
import Control.Applicative ((<|>))
import Control.Arrow (second)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Except (ExceptT (ExceptT), runExceptT)
import Darcs.Util.Path
import Darcs.Util.Tree
import Darcs.Util.Tree.Hashed
import Data.Bool (bool)
import Data.ByteString (ByteString)
import Data.List.NonEmpty (NonEmpty (..))
import Data.Maybe (listToMaybe, mapMaybe, fromMaybe)
import Data.Text (Text)
@@ -51,6 +53,7 @@ import qualified Data.Attoparsec.Text as A
import qualified Data.ByteString.Lazy as BL (ByteString)
import qualified Data.ByteString.Base16 as B16 (encode, decode)
import qualified Data.Foldable as F (find)
import qualified Data.List.NonEmpty as N
import qualified Data.Text as T (unpack, takeWhile, stripEnd)
import qualified Data.Vector as V (empty)
import qualified Development.Darcs.Internal.Patch.Parser as P
@@ -58,8 +61,10 @@ import qualified Development.Darcs.Internal.Patch.Parser as P
import Darcs.Local.Repository
import Data.Either.Local (maybeRight)
import Data.EventTime.Local
import Data.List.Local
import Data.Text.UTF8.Local (decodeStrict)
import Data.Time.Clock.Local ()
import Vervis.Changes
import Vervis.Foundation (Widget)
import Vervis.Patch
@@ -227,6 +232,31 @@ lastChange path now = fmap maybeRight $ runExceptT $ do
FriendlyConvert $
now `diffUTCTime` piTime pi
-- | Group hunks by filename, assuming all the hunks for a given file are
-- placed together in the patch file, and in increasing line number order.
groupHunksByFile
:: [P.Hunk] -> [(ByteString, NonEmpty (Int, [ByteString], [ByteString]))]
groupHunksByFile = groupMap P.hunkFile rest
where
rest h = (P.hunkLine h, P.hunkRemove h, P.hunkAdd h)
-- | Find groups of consecutive sequences of removes and adds, and turn each
-- such group into a single hunk. This may not actually be necessary, because
-- the small consecutive hunks would be joined later anyway when adding context
-- lines, but I'm still doing this here for consistency. In the "Vervis.Git"
-- module, the hunks are joined like this too.
joinHunks
:: NonEmpty (Int, [ByteString], [ByteString])
-> NonEmpty (Bool, Int, Hunk)
joinHunks =
N.map (mkHunk . second groupPairs) .
groupMapBy1 consecutive lineNumber lines
where
consecutive (n1, r1, _) (n2, _, _) = n1 + length r1 == n2
lineNumber (n, _, _) = n
lines (_, rs, as) = (map decodeUtf8 rs, map decodeUtf8 as)
mkHunk (line, (adds, pairs, rems)) = (False, line, Hunk adds pairs rems)
-- | Read patch content, both metadata and the actual diff, from a given Darcs
-- repository. Preconditions:
--
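
For a concrete picture of the hunk joining done by the new helpers above, here is an informal sketch of joinHunks on made-up input: the line numbers and line contents are invented, the ByteStrings are written as string literals, and the result is rendered loosely, with NonEmpty values shown as lists and the Hunk constructor applied to its hunkAddFirst, hunkRemoveAdd and hunkRemoveLast fields.

>>> joinHunks ((10, ["old a"], ["new a"]) :| [(11, ["old b"], ["new b"]), (40, [], ["extra"])])
(False, 10, Hunk [] [(["old a"], ["new a"]), (["old b"], ["new b"])] []) :| [(False, 40, Hunk ["extra"] [] [])]

The hunk at line 11 is merged into the one at line 10 because 10 plus the one removed line equals 11, while the hunk at line 40 is not adjacent and stays on its own; since it removes nothing, its added line lands in hunkAddFirst. Within the merged hunk, each remove run and the add run that follows it remain a separate pair in hunkRemoveAdd. groupHunksByFile performs the same kind of grouping one level up, collecting each file's hunks before joinHunks merges the adjacent ones.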
@@ -252,7 +282,8 @@ readPatch path hash = do
, patchTime = piTime pi
, patchTitle = piTitle pi
, patchDescription = fromMaybe "" $ piDescription pi
, patchDiff = map mkedit hunks
, patchDiff =
map (mkedit . second joinHunks) $ groupHunksByFile hunks
}
where
handle = either (const $ fail "readPatch failed") pure
@@ -274,26 +305,5 @@ readPatch path hash = do
<* " <"
<*> (A.takeWhile1 (/= '>') >>= email)
<* A.skip (== '>')
mkhunk h = case P.hunkRemove h of
[] -> Hunk
{ hunkAddFirst = map decodeUtf8 $ P.hunkAdd h
, hunkRemoveAdd = []
, hunkRemoveLast = []
}
r:rs -> case P.hunkAdd h of
[] -> Hunk
{ hunkAddFirst = []
, hunkRemoveAdd = []
, hunkRemoveLast = map decodeUtf8 $ r : rs
}
a:as -> Hunk
{ hunkAddFirst = []
, hunkRemoveAdd = [(decodeUtf8 <$> r :| rs, decodeUtf8 <$> a :| as)]
, hunkRemoveLast = []
}
mkedit hunk =
EditTextFile
(T.unpack $ decodeUtf8 $ P.hunkFile hunk)
V.empty
((False, P.hunkLine hunk, mkhunk hunk) :| [])
0 0
mkedit (file, hunks) =
EditTextFile (T.unpack $ decodeUtf8 file) V.empty hunks 0 0